"tests/experimental/vscode:/vscode.git/clone" did not exist on "8f8f8ef986913ee13c78bf0f066451a5ac62686a"
storage.py 7.84 KB
Newer Older
rusty1s's avatar
rusty1s committed
1
import warnings

import torch
from torch_scatter import scatter_add, segment_add


def optional(func, src):
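    # Apply `func` to `src` only if `src` is not None; otherwise pass None through.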
    return func(src) if src is not None else src


class cached_property(object):
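    # Descriptor that computes an attribute on first access and caches the result
    # on the instance under the name `_<func name>`.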
    def __init__(self, func):
        self.func = func

    def __get__(self, obj, cls):
        value = getattr(obj, f'_{self.func.__name__}', None)
        if value is None:
            value = self.func(obj)
            setattr(obj, f'_{self.func.__name__}', value)
        return value


class SparseStorage(object):
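    # Stores a sparse matrix in sorted COO format (`index` plus optional `value`)
    # and lazily caches the tensors needed for CSR/CSC access: `rowptr`, `colptr`
    # and the permutations between row-major and column-major ordering.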
    layouts = ['coo', 'csr', 'csc']
    cache_keys = ['rowptr', 'colptr', 'csr_to_csc', 'csc_to_csr']

    def __init__(self,
                 index,
                 value=None,
                 sparse_size=None,
                 rowptr=None,
                 colptr=None,
                 csr_to_csc=None,
                 csc_to_csr=None,
                 is_sorted=False):

        assert index.dtype == torch.long
        assert index.dim() == 2 and index.size(0) == 2

        if value is not None:
            assert value.device == index.device
            assert value.size(0) == index.size(1)
            value = value.contiguous()

        if sparse_size is None:
            sparse_size = torch.Size((index.max(dim=-1)[0] + 1).tolist())

        if rowptr is not None:
            assert rowptr.dtype == torch.long and rowptr.device == index.device
            assert rowptr.dim() == 1 and rowptr.numel() - 1 == sparse_size[0]

        if colptr is not None:
            assert colptr.dtype == torch.long and colptr.device == index.device
            assert colptr.dim() == 1 and colptr.numel() - 1 == sparse_size[1]

        if csr_to_csc is not None:
            assert csr_to_csc.dtype == torch.long
            assert csr_to_csc.device == index.device
            assert csr_to_csc.dim() == 1
            assert csr_to_csc.numel() == index.size(1)

        if csc_to_csr is not None:
            assert csc_to_csr.dtype == torch.long
            assert csc_to_csr.device == index.device
            assert csc_to_csr.dim() == 1
            assert csc_to_csr.numel() == index.size(1)

        if not is_sorted:
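            # Linearize (row, col) into a single row-major key so that sortedness
            # can be checked and restored with a one-dimensional argsort.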
            idx = sparse_size[1] * index[0] + index[1]
            # Only sort if necessary...
            if (idx <= torch.cat([idx.new_zeros(1), idx[:-1]], dim=0)).any():
                perm = idx.argsort()
                index = index[:, perm]
                value = None if value is None else value[perm]
                rowptr = None
                colptr = None
                csr_to_csc = None
                csc_to_csr = None

        self._index = index
        self._value = value
        self._sparse_size = sparse_size
        self._rowptr = rowptr
        self._colptr = colptr
        self._csr_to_csc = csr_to_csc
        self._csc_to_csr = csc_to_csr

    @property
    def index(self):
        return self._index

    @property
    def row(self):
        return self._index[0]

    @property
    def col(self):
        return self._index[1]

    def has_value(self):
        return self._value is not None

    @property
    def value(self):
        return self._value

    def set_value_(self, value, layout=None):
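        # `layout` describes the ordering of `value`; a 'csc'-ordered value is
        # permuted back into the storage's row-major ordering below.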
        if layout is None:
            layout = 'coo'
            warnings.warn('`layout` argument unset, using default layout '
                          '"coo". This may lead to unexpected behaviour.')
        assert layout in self.layouts
        if value is not None:
            assert value.device == self._index.device
            assert value.size(0) == self._index.size(1)
            if layout == 'csc':
                value = value[self.csc_to_csr]
        return self.apply_value_(lambda x: value)

    def set_value(self, value, layout=None):
        if layout is None:
            layout = 'coo'
            warnings.warn('`layout` argument unset, using default layout '
                          '"coo". This may lead to unexpected behaviour.')
        assert layout in self.layouts
        if value is not None:
            assert value.device == self._index.device
            assert value.size(0) == self._index.size(1)
            if layout == 'csc':
                value = value[self.csc_to_csr]
        return self.apply_value(lambda x: value)

    def sparse_size(self, dim=None):
        return self._sparse_size if dim is None else self._sparse_size[dim]

    def sparse_resize_(self, *sizes):
        assert len(sizes) == 2
        self._sparse_size = torch.Size(sizes)
        return self

    @cached_property
    def rowptr(self):
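        # CSR row pointer: count the entries per row via a segmented sum over the
        # (sorted) row indices, then take the cumulative sum.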
        row = self.row
        ones = torch.ones_like(row)
        out_deg = segment_add(ones, row, dim=0, dim_size=self._sparse_size[0])
        return torch.cat([row.new_zeros(1), out_deg.cumsum(0)], dim=0)

    @cached_property
    def colptr(self):
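        # CSC column pointer: count the entries per column via scatter-add (the
        # column indices are not sorted), then take the cumulative sum.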
        col = self.col
        ones = torch.ones_like(col)
        in_deg = scatter_add(ones, col, dim=0, dim_size=self._sparse_size[1])
        return torch.cat([col.new_zeros(1), in_deg.cumsum(0)], dim=0)

    @cached_property
    def csr_to_csc(self):
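        # Permutation that reorders the row-major (CSR/COO) entries into
        # column-major (CSC) order.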
        idx = self._sparse_size[0] * self.col + self.row
        return idx.argsort()

    @cached_property
    def csc_to_csr(self):
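        # Inverse permutation of `csr_to_csc`.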
        return self.csr_to_csc.argsort()

    def is_coalesced(self):
        raise NotImplementedError

    def coalesce(self):
        raise NotImplementedError

    def fill_cache_(self, *args):
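        # Materialize the requested cached tensors (all of `cache_keys` by
        # default) by touching the corresponding cached properties.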
        for arg in args or self.cache_keys:
            getattr(self, arg)
        return self

    def clear_cache_(self, *args):
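        # Drop the requested cached tensors (all of `cache_keys` by default) so
        # that they are recomputed on next access.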
        for arg in args or self.cache_keys:
            setattr(self, f'_{arg}', None)
        return self

    def __copy__(self):
        return self.apply(lambda x: x)

    def clone(self):
        return self.apply(lambda x: x.clone())

    def __deepcopy__(self, memo):
        new_storage = self.clone()
        memo[id(self)] = new_storage
        return new_storage

    def apply_value_(self, func):
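        # Apply `func` to the value tensor in-place; index and caches stay as-is.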
        self._value = optional(func, self._value)
        return self

    def apply_value(self, func):
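        # Return a new storage with `func` applied to the value tensor only.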
        return self.__class__(
            self._index,
            optional(func, self._value),
            self._sparse_size,
            self._rowptr,
            self._colptr,
            self._csr_to_csc,
            self._csc_to_csr,
            is_sorted=True,
        )

    def apply_(self, func):
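        # In-place variant: apply `func` to index, value and all cached tensors.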
        self._index = func(self._index)
        self._value = optional(func, self._value)
        for key in self.cache_keys:
            setattr(self, f'_{key}', optional(func, getattr(self, f'_{key}')))
        return self

    def apply(self, func):
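        # Out-of-place variant: build a new storage with `func` applied to every
        # tensor, keeping the sorted order.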
        return self.__class__(
            func(self._index),
            optional(func, self._value),
            self._sparse_size,
            optional(func, self._rowptr),
            optional(func, self._colptr),
            optional(func, self._csr_to_csc),
            optional(func, self._csc_to_csr),
            is_sorted=True,
        )

    def map(self, func):
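        # Apply `func` to every tensor that is currently present and collect the
        # results in a list.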
        data = [func(self.index)]
        if self.has_value():
            data += [func(self.value)]
        data += [
            func(getattr(self, f'_{key}')) for key in self.cache_keys
            if getattr(self, f'_{key}') is not None
        ]
        return data


if __name__ == '__main__':
    from torch_geometric.datasets import Reddit, Planetoid  # noqa
    import time  # noqa
    import copy  # noqa

    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    # dataset = Reddit('/tmp/Reddit')
    dataset = Planetoid('/tmp/Cora', 'Cora')
    data = dataset[0].to(device)
    edge_index = data.edge_index

    storage = SparseStorage(edge_index, is_sorted=True)
    t = time.perf_counter()
    storage.fill_cache_()
    print(time.perf_counter() - t)
    t = time.perf_counter()
    storage.clear_cache_()
    storage.fill_cache_()
    print(time.perf_counter() - t)
    print(storage)
    # storage = storage.clone()
    # print(storage)
    storage = copy.copy(storage)
    print(storage)
    print(id(storage))
    storage = copy.deepcopy(storage)
    print(storage)
    storage.fill_cache_()
    storage.clear_cache_()