import inspect

import torch
from torch import Size
from torch_scatter import scatter_add, segment_add


class SparseStorage(object):
    """Storage backend for a two-dimensional sparse tensor.

    Holds the COO coordinates (``row``, ``col``) sorted in row-major order,
    optional per-entry values, the CSR/CSC compression pointers and the
    permutations that convert entries between CSR and CSC order.  All derived
    fields are computed on construction when not supplied by the caller.
    """

    def __init__(self, row, col, value=None, sparse_size=None, rowptr=None,
                 colptr=None, arg_csr_to_csc=None, arg_csc_to_csr=None,
                 is_sorted=False):
        """Build the storage from COO coordinates.

        Args:
            row (LongTensor): Row index of each non-zero entry.
            col (LongTensor): Column index of each non-zero entry.
            value (Tensor, optional): Entry values; first dimension must
                match ``row``. (default: :obj:`None`)
            sparse_size (Size, optional): Two-dimensional sparse shape;
                inferred from the maximum indices if omitted.
            rowptr (LongTensor, optional): Precomputed CSR row pointer of
                length ``sparse_size[0] + 1``.
            colptr (LongTensor, optional): Precomputed CSC column pointer of
                length ``sparse_size[1] + 1``.
            arg_csr_to_csc (LongTensor, optional): Permutation mapping CSR
                entry order to CSC entry order.
            arg_csc_to_csr (LongTensor, optional): Inverse permutation.
            is_sorted (bool, optional): If set, skip the row-major sort
                check. (default: :obj:`False`)
        """
        assert row.dtype == torch.long and col.dtype == torch.long
        # BUGFIX: the original compared `row.device` against itself, so
        # mismatched row/col devices slipped through.
        assert row.device == col.device
        assert row.dim() == 1 and col.dim() == 1 and row.numel() == col.numel()

        if sparse_size is None:
            sparse_size = Size((row.max().item() + 1, col.max().item() + 1))

        if not is_sorted:
            idx = sparse_size[1] * row + col
            # Only sort if necessary...
            if (idx <= torch.cat([idx.new_zeros(1), idx[:-1]], dim=0)).any():
                perm = idx.argsort()
                row = row[perm]
                col = col[perm]
                value = None if value is None else value[perm]
                # Re-ordering invalidates every cached derived field.
                rowptr = None
                colptr = None
                arg_csr_to_csc = None
                arg_csc_to_csr = None

        if value is not None:
            assert row.device == value.device and value.size(0) == row.size(0)
            value = value.contiguous()

        ones = None
        if rowptr is None:
            ones = torch.ones_like(row)
            # Out-degree per row -> cumulative sum -> CSR pointer.
            out_deg = segment_add(ones, row, dim=0, dim_size=sparse_size[0])
            rowptr = torch.cat([row.new_zeros(1), out_deg.cumsum(0)], dim=0)
        else:
            assert rowptr.dtype == torch.long and rowptr.device == row.device
            assert rowptr.dim() == 1 and rowptr.numel() - 1 == sparse_size[0]

        if colptr is None:
            ones = torch.ones_like(col) if ones is None else ones
            # `col` is unsorted here, hence scatter instead of segment.
            in_deg = scatter_add(ones, col, dim=0, dim_size=sparse_size[1])
            colptr = torch.cat([col.new_zeros(1), in_deg.cumsum(0)], dim=0)
        else:
            assert colptr.dtype == torch.long and colptr.device == col.device
            assert colptr.dim() == 1 and colptr.numel() - 1 == sparse_size[1]

        if arg_csr_to_csc is None:
            # Column-major sort key yields the CSR -> CSC permutation.
            idx = sparse_size[0] * col + row
            arg_csr_to_csc = idx.argsort()
        else:
            assert arg_csr_to_csc.dtype == torch.long
            assert arg_csr_to_csc.device == row.device
            assert arg_csr_to_csc.dim() == 1
            assert arg_csr_to_csc.numel() == row.numel()

        if arg_csc_to_csr is None:
            arg_csc_to_csr = arg_csr_to_csc.argsort()
        else:
            assert arg_csc_to_csr.dtype == torch.long
            assert arg_csc_to_csr.device == row.device
            assert arg_csc_to_csr.dim() == 1
            assert arg_csc_to_csr.numel() == row.numel()

        self.__row = row
        self.__col = col
        self.__value = value
        self.__sparse_size = sparse_size
        self.__rowptr = rowptr
        self.__colptr = colptr
        self.__arg_csr_to_csc = arg_csr_to_csc
        self.__arg_csc_to_csr = arg_csc_to_csr

    @property
    def _row(self):
        return self.__row

    @property
    def _col(self):
        return self.__col

    @property
    def _index(self):
        # Stacked `(2, nnz)` COO index tensor.
        return torch.stack([self.__row, self.__col], dim=0)

    @property
    def _rowptr(self):
        return self.__rowptr

    @property
    def _colptr(self):
        return self.__colptr

    @property
    def _arg_csr_to_csc(self):
        return self.__arg_csr_to_csc

    @property
    def _arg_csc_to_csr(self):
        return self.__arg_csc_to_csr

    @property
    def _value(self):
        return self.__value

    @property
    def has_value(self):
        return self.__value is not None

    def sparse_size(self, dim=None):
        """Return the 2D sparse shape, or one of its dimensions."""
        return self.__sparse_size if dim is None else self.__sparse_size[dim]

    def size(self, dim=None):
        """Sparse shape followed by the trailing dense dims of `value`."""
        size = self.__sparse_size
        size += () if self.__value is None else self.__value.size()[1:]
        return size if dim is None else size[dim]

    def dim(self):
        return len(self.size())

    @property
    def shape(self):
        return self.size()

    def sparse_resize_(self, *sizes):
        """In-place resize of the sparse dimensions."""
        assert len(sizes) == 2
        # BUGFIX: the original used `==` (a no-op comparison) instead of
        # assignment, so the resize silently did nothing.
        self.__sparse_size = Size(sizes)
        return self

    def nnz(self):
        """Number of stored (non-zero) entries."""
        return self.__row.size(0)

    def density(self):
        return self.nnz() / (self.__sparse_size[0] * self.__sparse_size[1])

    def sparsity(self):
        return 1 - self.density()

    def avg_row_length(self):
        return self.nnz() / self.__sparse_size[0]

    def avg_col_length(self):
        return self.nnz() / self.__sparse_size[1]

    def numel(self):
        return self.nnz() if self.__value is None else self.__value.numel()

    def clone(self):
        return self._apply(lambda x: x.clone())

    def __copy__(self):
        return self.clone()

    def __deepcopy__(self, memo):
        memo = memo.setdefault('SparseStorage', {})
        # BUGFIX: the original keyed the memo on the nonexistent attribute
        # `self._cdata`; use the object's identity instead.
        if id(self) in memo:
            return memo[id(self)]
        new_storage = self.clone()
        memo[id(self)] = new_storage
        return new_storage

    def pin_memory(self):
        return self._apply(lambda x: x.pin_memory())

    def is_pinned(self):
        # BUGFIX: the original iterated a nonexistent `self.__attributes`
        # and tested the *bound method* `x.is_pinned` (always truthy)
        # instead of calling it.
        return all(v.is_pinned()
                   for v in self.__state().values() if torch.is_tensor(v))

    def share_memory_(self):
        return self._apply_(lambda x: x.share_memory_())

    def is_shared(self):
        # BUGFIX: same `__attributes` / missing-call defect as `is_pinned`.
        return all(v.is_shared()
                   for v in self.__state().values() if torch.is_tensor(v))

    @property
    def device(self):
        return self.__row.device

    def cpu(self):
        return self._apply(lambda x: x.cpu())

    def cuda(self, device=None, non_blocking=False, **kwargs):
        return self._apply(lambda x: x.cuda(device, non_blocking, **kwargs))

    @property
    def is_cuda(self):
        return self.__row.is_cuda

    @property
    def dtype(self):
        return None if self.__value is None else self.__value.dtype

    def to(self, *args, **kwargs):
        """Mimic `Tensor.to`, dispatching device moves and dtype casts.

        BUGFIX: the original called `.remove()` on the `args` *tuple*
        (AttributeError), passed `device` both positionally and inside
        `**kwargs`, and returned an unbound `out` when no conversion
        argument was given.
        """
        out = self
        device = kwargs.pop('device', None)
        if device is not None:
            out = out._apply(lambda x: x.to(device, **kwargs))

        remaining = []
        for arg in args:
            if isinstance(arg, (str, torch.device)):
                out = out._apply(lambda x: x.to(arg, **kwargs))
            else:
                remaining.append(arg)

        if len(remaining) > 0:
            out = out.type(*remaining, **kwargs)

        return out

    def type(self, dtype=None, non_blocking=False, **kwargs):
        """Cast `value` to `dtype`, or report the dtype when `dtype=None`."""
        if dtype is None:
            return self.dtype
        return self._apply_value(
            lambda x: x.type(dtype, non_blocking, **kwargs))

    def is_floating_point(self):
        # A value-less storage behaves like implicit floating-point ones.
        return self.__value is None or torch.is_floating_point(self.__value)

    def bfloat16(self):
        return self._apply_value(lambda x: x.bfloat16())

    def bool(self):
        return self._apply_value(lambda x: x.bool())

    def byte(self):
        return self._apply_value(lambda x: x.byte())

    def char(self):
        return self._apply_value(lambda x: x.char())

    def half(self):
        return self._apply_value(lambda x: x.half())

    def float(self):
        return self._apply_value(lambda x: x.float())

    def double(self):
        return self._apply_value(lambda x: x.double())

    def short(self):
        return self._apply_value(lambda x: x.short())

    def int(self):
        return self._apply_value(lambda x: x.int())

    def long(self):
        return self._apply_value(lambda x: x.long())

    def __state(self):
        """Map constructor keyword -> current attribute value.

        Skips `self` and the trailing `is_sorted` flag so the result can be
        splatted back into `__init__`.
        NOTE(review): uses `self.__class__.__name__` for un-mangling, which
        assumes the instance is not a subclass — confirm if subclassed.
        """
        return {
            key: getattr(self, f'_{self.__class__.__name__}__{key}')
            for key in inspect.getfullargspec(self.__init__)[0][1:-1]
        }

    def _apply_value(self, func):
        """Return a new storage with `func` applied to `value` only."""
        if self.__value is None:
            return self

        state = self.__state()
        # BUGFIX: the original used `==` (comparison) instead of `=`, so
        # the converted value was discarded.
        state['value'] = func(self.__value)
        return self.__class__(is_sorted=True, **state)

    def _apply_value_(self, func):
        """In-place variant of `_apply_value`."""
        self.__value = None if self.__value is None else func(self.__value)
        return self

    def _apply(self, func):
        """Return a new storage with `func` applied to every tensor."""
        state = {k: func(v) if torch.is_tensor(v) else v
                 for k, v in self.__state().items()}
        return self.__class__(is_sorted=True, **state)

    def _apply_(self, func):
        """In-place variant of `_apply`."""
        for k, v in self.__state().items():
            v = func(v) if torch.is_tensor(v) else v
            setattr(self, f'_{self.__class__.__name__}__{k}', v)
        return self


if __name__ == '__main__':
    from torch_geometric.datasets import Reddit  # noqa
    import time  # noqa

    # Smoke test: build a SparseStorage from the Reddit graph's edge index.
    if torch.cuda.is_available():
        device = 'cuda'
    else:
        device = 'cpu'

    dataset = Reddit('/tmp/Reddit')
    data = dataset[0].to(device)
    row, col = data.edge_index

    storage = SparseStorage(row, col)