import warnings
import inspect
from textwrap import indent

import torch

from torch_sparse.storage import SparseStorage

methods = list(zip(*inspect.getmembers(SparseStorage)))[0]
methods = [name for name in methods if '__' not in name and name != 'clone']
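# NOTE: every member collected above is re-bound onto SparseTensor instances in
# the `_storage` setter below, so a SparseTensor transparently proxies its
# storage (e.g. `nnz()` and `sparse_size()`, which are assumed to be defined on
# SparseStorage).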


def __is_scalar__(x):
    return isinstance(x, (int, float))


class SparseTensor(object):
    def __init__(self, index, value=None, sparse_size=None, is_sorted=False):
        assert index.dim() == 2 and index.size(0) == 2
        self._storage = SparseStorage(index[0], index[1], value, sparse_size,
                                      is_sorted=is_sorted)

    @classmethod
    def from_storage(cls, storage):
        self = cls.__new__(cls)
        self._storage = storage
        return self

    @classmethod
    def from_dense(cls, mat):
        if mat.dim() > 2:
            index = mat.abs().sum([i for i in range(2, mat.dim())]).nonzero()
        else:
            index = mat.nonzero()

        index = index.t().contiguous()
        value = mat[index[0], index[1]]
        return cls(index, value, mat.size()[:2], is_sorted=True)
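    # Sketch of the conversion above (illustrative only):
    #   mat = torch.tensor([[1., 0.], [0., 2.]])
    #   SparseTensor.from_dense(mat)
    #   -> index = [[0, 1], [0, 1]], value = [1., 2.]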

    @property
    def _storage(self):
        return self.__storage

    @_storage.setter
    def _storage(self, storage):
        self.__storage = storage
        for name in methods:
            setattr(self, name, getattr(storage, name))
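    # Accesses such as `self._row`, `self._col` or `self.nnz()` used throughout
    # this class are expected to resolve through the storage members re-bound
    # in the setter above (assuming SparseStorage exposes them).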

    def clone(self):
        return SparseTensor.from_storage(self._storage.clone())

    def __copy__(self):
        return self.clone()

    def __deepcopy__(self, memo):
        memo = memo.setdefault('SparseStorage', {})
        if self._cdata in memo:
            return memo[self._cdata]
        new_sparse_tensor = self.clone()
        memo[self._cdata] = new_sparse_tensor
        return new_sparse_tensor

    def coo(self):
        return self._index, self._value

    def csr(self):
        return self._rowptr, self._col, self._value

    def csc(self):
        perm = self._arg_csr_to_csc
        return self._colptr, self._row[perm], self._value[perm]
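    # Layout sketch for a 2x3 matrix with entries (0, 0)=a, (0, 2)=b, (1, 1)=c,
    # assuming entries are stored in row-major (COO/CSR) order:
    #   coo(): index = [[0, 0, 1], [0, 2, 1]], value = [a, b, c]
    #   csr(): rowptr = [0, 2, 3], col = [0, 2, 1], value = [a, b, c]
    #   csc(): colptr = [0, 1, 2, 3], row = [0, 1, 0], value = [a, c, b]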

    def is_quadratic(self):
        return self.sparse_size()[0] == self.sparse_size()[1]

    def is_symmetric(self):
        if not self.is_quadratic():
            return False

        index1, value1 = self.coo()
        index2, value2 = self.t().coo()
        index_symmetric = (index1 == index2).all()
        value_symmetric = (value1 == value2).all() if self.has_value else True
        return index_symmetric and value_symmetric

    def set_value(self, value, layout=None):
        if layout is None:
            layout = 'coo'
            warnings.warn('`layout` argument unset, using default layout '
                          '"coo". This may lead to unexpected behaviour.')
        assert layout in ['coo', 'csr', 'csc']
        if value is not None and layout == 'csc':
            value = value[self._arg_csc_to_csr]
        return self._apply_value(value)
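    # NOTE: in both `set_value` and `set_value_`, a `value` given in CSC order
    # is permuted back into the internal COO/CSR order via `_arg_csc_to_csr`
    # (a permutation assumed to be cached by the storage) before being applied.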

    def set_value_(self, value, layout=None):
        if layout is None:
            layout = 'coo'
            warnings.warn('`layout` argument unset, using default layout '
                          '"coo". This may lead to unexpected behaviour.')
        assert layout in ['coo', 'csr', 'csc']
        if value is not None and layout == 'csc':
            value = value[self._arg_csc_to_csr]
        return self._apply_value_(value)

    def set_diag(self, value):
        raise NotImplementedError

    def t(self):
        storage = SparseStorage(
            self._col[self._arg_csr_to_csc],
            self._row[self._arg_csr_to_csc],
            self._value[self._arg_csr_to_csc] if self.has_value else None,
            self.sparse_size()[::-1],
            self._colptr,
            self._rowptr,
            self._arg_csc_to_csr,
            self._arg_csr_to_csc,
            is_sorted=True,
        )
        return self.__class__.from_storage(storage)
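    # The transpose above swaps rows and columns and reuses the cached
    # CSR<->CSC permutations, so no re-sorting is needed: the old `colptr`
    # becomes the new `rowptr` (and vice versa), and the two permutation
    # arguments simply change roles.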

    def coalesce(self, reduce='add'):
        raise NotImplementedError

    def is_coalesced(self):
        raise NotImplementedError

    def masked_select(self, mask):
        raise NotImplementedError

    def index_select(self, index):
        raise NotImplementedError

    def select(self, dim, index):
        raise NotImplementedError

    def filter(self, index):
        assert self.is_symmetric()
        assert index.dtype == torch.long or index.dtype == torch.bool
        raise NotImplementedError

    def permute(self, index):
        assert index.dtype == torch.long
        return self.filter(index)

    def __getitem__(self, idx):
        # Convert int and slice to index tensor
        # Filter list into edge and sparse slice
        raise NotImplementedError

    def __reduce(self, dim, reduce, only_nnz):
        raise NotImplementedError

    def sum(self, dim):
        return self.__reduce(dim, reduce='add', only_nnz=True)

    def prod(self, dim):
        return self.__reduce(dim, reduce='mul', only_nnz=True)

    def min(self, dim, only_nnz=False):
        return self.__reduce(dim, reduce='min', only_nnz=only_nnz)

    def max(self, dim, only_nnz=False):
        return self.__reduce(dim, reduce='max', only_nnz=only_nnz)

    def mean(self, dim, only_nnz=False):
        return self.__reduce(dim, reduce='mean', only_nnz=only_nnz)
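    # `only_nnz` is presumably meant to control whether a reduction runs over
    # stored entries only or also includes the implicit zeros (relevant for
    # `min`, `max` and `mean`); `__reduce` itself is still a stub above.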

    def matmul(self, mat, reduce='add'):
        assert self.numel() == self.nnz()  # Disallow multi-dimensional value
        if torch.is_tensor(mat):
            raise NotImplementedError
        elif isinstance(mat, self.__class__):
            assert reduce == 'add'
            assert mat.numel() == mat.nnz()  # Disallow multi-dimensional value
            raise NotImplementedError
        raise ValueError('Argument needs to be of type `torch.Tensor` or '
                         'type `torch_sparse.SparseTensor`.')
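    # A SparseTensor without an explicit value is treated as holding an
    # implicit value of 1 per entry, which is why the value-less branches in
    # `add` below use `other + 1`. For a dense `other`, `layout` declares how
    # its entries are ordered; CSC-ordered values are permuted into COO/CSR
    # order first.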

    def add(self, other, layout=None):
        if __is_scalar__(other):
            if self.has_value:
                return self.set_value(self._value + other, 'coo')
            else:
                return self.set_value(torch.full((self.nnz(), ), other + 1),
                                      'coo')
        elif torch.is_tensor(other):
            if layout is None:
                layout = 'coo'
                warnings.warn('`layout` argument unset, using default layout '
                              '"coo". This may lead to unexpected behaviour.')
            assert layout in ['coo', 'csr', 'csc']
            if layout == 'csc':
                other = other[self._arg_csc_to_csr]
            if self.has_value:
                return self.set_value(self._value + other, 'coo')
            else:
                return self.set_value(other + 1, 'coo')
        elif isinstance(other, self.__class__):
            raise NotImplementedError
        raise ValueError('Argument needs to be of type `int`, `float`, '
                         '`torch.Tensor` or `torch_sparse.SparseTensor`.')

    def add_(self, other, layout=None):
        if __is_scalar__(other):
            raise NotImplementedError
        elif torch.is_tensor(other):
            raise NotImplementedError
        raise ValueError('Argument needs to be a scalar or of type '
                         '`torch.Tensor`.')

    def __add__(self, other):
        return self.add(other)

    def __radd__(self, other):
        return self.add(other)

    def sub(self, layout=None):
        raise NotImplementedError

    def sub_(self, layout=None):
        raise NotImplementedError

    def mul(self, layout=None):
        raise NotImplementedError

    def mul_(self, layout=None):
        raise NotImplementedError

    def div(self, layout=None):
        raise NotImplementedError

    def div_(self, layout=None):
        raise NotImplementedError

    def to_dense(self, dtype=None):
        dtype = dtype or self.dtype
        mat = torch.zeros(self.size(), dtype=dtype, device=self.device)
        mat[self._row, self._col] = self._value if self.has_value else 1
        return mat
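    # NOTE: missing values are densified as ones above, matching the
    # implicit-one convention used in `add()` and
    # `to_torch_sparse_coo_tensor()`.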

    def to_scipy(self, layout):
        raise NotImplementedError

    def to_torch_sparse_coo_tensor(self, dtype=None, requires_grad=False):
        index, value = self.coo()
        return torch.sparse_coo_tensor(
            index,
            torch.ones_like(self._row, dtype=dtype) if value is None else value,
            self.size(), device=self.device, requires_grad=requires_grad)

    def __repr__(self):
        i = ' ' * 6
        index, value = self.coo()
        infos = [f'index={indent(index.__repr__(), i)[len(i):]}']
        if value is not None:
            infos += [f'value={indent(value.__repr__(), i)[len(i):]}']
        infos += [
            f'size={tuple(self.size())}, '
            f'nnz={self.nnz()}, '
            f'density={100 * self.density():.02f}%'
        ]
        infos = ',\n'.join(infos)

        i = ' ' * (len(self.__class__.__name__) + 1)
        return f'{self.__class__.__name__}({indent(infos, i)[len(i):]})'


if __name__ == '__main__':
    from torch_geometric.datasets import Reddit, Planetoid  # noqa
    import time  # noqa

    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    device = 'cpu'

    # dataset = Reddit('/tmp/Reddit')
    dataset = Planetoid('/tmp/Cora', 'Cora')
    # dataset = Planetoid('/tmp/PubMed', 'PubMed')
    data = dataset[0].to(device)

    _bytes = data.edge_index.numel() * 8
    _kbytes = _bytes / 1024
    _mbytes = _kbytes / 1024
    _gbytes = _mbytes / 1024
    print(f'Storage: {_gbytes:.04f} GB')

    mat1 = SparseTensor(data.edge_index)
    print(mat1)
    mat1 = mat1.t()

    mat2 = torch.sparse_coo_tensor(data.edge_index, torch.ones(data.num_edges),
                                   device=device)
    mat2 = mat2.coalesce()
    mat2 = mat2.t().coalesce()

    index1, value1 = mat1.coo()
    index2, value2 = mat2._indices(), mat2._values()
    assert torch.allclose(index1, index2)

    out1 = mat1.to_dense()
    out2 = mat2.to_dense()
    assert torch.allclose(out1, out2)

    out = 2 + mat1
    print(out)

    # mat1[1]
    # mat1[1, 1]
    # mat1[..., -1]
    # mat1[:, -1]
    # mat1[1:4, 1:4]
    # mat1[torch.tensor([0, 1, 2])]