# utils.py — helpers for constructing random sparse matrices in tests.
import numpy as np
import torch

from dgl.sparse import from_coo, from_csc, from_csr, SparseMatrix

# Seed both RNGs once at import time so the randomly generated test
# matrices are reproducible across runs.
_SEED = 42
np.random.seed(_SEED)
torch.random.manual_seed(_SEED)

def clone_detach_and_grad(t):
    """Return an independent leaf copy of ``t`` with gradients enabled.

    The copy shares no storage or autograd history with ``t``.
    """
    copied = t.detach().clone()
    copied.requires_grad_(True)
    return copied


def rand_coo(shape, nnz, dev, nz_dim=None):
    """Build a random COO SparseMatrix with no duplicate coordinates.

    Args:
        shape: ``(num_rows, num_cols)`` of the matrix.
        nnz: number of nonzero entries to sample.
        dev: torch device for every created tensor.
        nz_dim: optional trailing dimension for the values; when given the
            values have shape ``(nnz, nz_dim)`` instead of ``(nnz,)``.

    Returns:
        A SparseMatrix built via ``from_coo`` whose values require grad.
    """
    # Sampling linear indices without replacement guarantees that every
    # (row, col) pair is unique.
    flat = np.random.choice(shape[0] * shape[1], nnz, replace=False)
    flat = torch.tensor(flat, device=dev).long()
    row = torch.div(flat, shape[1], rounding_mode="floor")
    col = flat % shape[1]
    val_shape = (nnz,) if nz_dim is None else (nnz, nz_dim)
    val = torch.randn(*val_shape, device=dev, requires_grad=True)
    return from_coo(row, col, val, shape)


def rand_csr(shape, nnz, dev, nz_dim=None):
    """Build a random CSR SparseMatrix with no duplicate coordinates.

    Args:
        shape: ``(num_rows, num_cols)`` of the matrix.
        nnz: number of nonzero entries to sample.
        dev: torch device for every created tensor.
        nz_dim: optional trailing dimension for the values; when given the
            values have shape ``(nnz, nz_dim)`` instead of ``(nnz,)``.

    Returns:
        A SparseMatrix built via ``from_csr`` whose values require grad.
    """
    # Create a sparse matrix without duplicate entries.
    nnzid = np.random.choice(shape[0] * shape[1], nnz, replace=False)
    nnzid = torch.tensor(nnzid, device=dev).long()
    row = torch.div(nnzid, shape[1], rounding_mode="floor")
    col = nnzid % shape[1]
    if nz_dim is None:
        val = torch.randn(nnz, device=dev, requires_grad=True)
    else:
        val = torch.randn(nnz, nz_dim, device=dev, requires_grad=True)
    # Vectorized row histogram + cumsum builds the CSR indptr; this replaces
    # the original per-element Python loop with the same resulting values.
    indptr = torch.zeros(shape[0] + 1, device=dev, dtype=torch.int64)
    indptr[1:] = torch.cumsum(torch.bincount(row, minlength=shape[0]), 0)
    _, row_sorted_idx = torch.sort(row)
    indices = col[row_sorted_idx]
    # NOTE(review): ``val`` is not permuted alongside ``indices``; this only
    # shuffles which i.i.d. random value lands where, and it keeps ``val``
    # a grad-requiring leaf tensor (matches the original behavior).
    return from_csr(indptr, indices, val, shape=shape)


def rand_csc(shape, nnz, dev, nz_dim=None):
    """Build a random CSC SparseMatrix with no duplicate coordinates.

    Args:
        shape: ``(num_rows, num_cols)`` of the matrix.
        nnz: number of nonzero entries to sample.
        dev: torch device for every created tensor.
        nz_dim: optional trailing dimension for the values; when given the
            values have shape ``(nnz, nz_dim)`` instead of ``(nnz,)``.

    Returns:
        A SparseMatrix built via ``from_csc`` whose values require grad.
    """
    # Create a sparse matrix without duplicate entries.
    nnzid = np.random.choice(shape[0] * shape[1], nnz, replace=False)
    nnzid = torch.tensor(nnzid, device=dev).long()
    row = torch.div(nnzid, shape[1], rounding_mode="floor")
    col = nnzid % shape[1]
    if nz_dim is None:
        val = torch.randn(nnz, device=dev, requires_grad=True)
    else:
        val = torch.randn(nnz, nz_dim, device=dev, requires_grad=True)
    # Vectorized column histogram + cumsum builds the CSC indptr; this
    # replaces the original per-element Python loop, producing identical
    # values.
    indptr = torch.zeros(shape[1] + 1, device=dev, dtype=torch.int64)
    indptr[1:] = torch.cumsum(torch.bincount(col, minlength=shape[1]), 0)
    _, col_sorted_idx = torch.sort(col)
    indices = row[col_sorted_idx]
    # NOTE(review): ``val`` stays in sampling order (unsorted), matching the
    # original behavior; values are i.i.d. random so only their placement
    # differs, and ``val`` remains a grad-requiring leaf.
    return from_csc(indptr, indices, val, shape=shape)


def rand_coo_uncoalesced(shape, nnz, dev):
    """Build a random COO SparseMatrix that may contain duplicate entries.

    Rows and columns are drawn independently, so repeated ``(row, col)``
    pairs are possible (i.e. the matrix may be uncoalesced).
    """
    rows = torch.randint(shape[0], (nnz,), device=dev)
    cols = torch.randint(shape[1], (nnz,), device=dev)
    values = torch.randn(nnz, device=dev, requires_grad=True)
    return from_coo(rows, cols, values, shape)


def rand_csr_uncoalesced(shape, nnz, dev):
    """Build a random CSR SparseMatrix that may contain duplicate entries.

    Rows and columns are drawn independently, so repeated ``(row, col)``
    pairs are possible (i.e. the matrix may be uncoalesced).
    """
    row = torch.randint(shape[0], (nnz,), device=dev)
    col = torch.randint(shape[1], (nnz,), device=dev)
    val = torch.randn(nnz, device=dev, requires_grad=True)
    # Vectorized row histogram + cumsum builds the CSR indptr; this replaces
    # the original per-element Python loop with the same resulting values.
    indptr = torch.zeros(shape[0] + 1, device=dev, dtype=torch.int64)
    indptr[1:] = torch.cumsum(torch.bincount(row, minlength=shape[0]), 0)
    _, row_sorted_idx = torch.sort(row)
    indices = col[row_sorted_idx]
    # NOTE(review): ``val`` is left in sampling order, matching the original.
    return from_csr(indptr, indices, val, shape=shape)


def rand_csc_uncoalesced(shape, nnz, dev):
    """Build a random CSC SparseMatrix that may contain duplicate entries.

    Rows and columns are drawn independently, so repeated ``(row, col)``
    pairs are possible (i.e. the matrix may be uncoalesced).
    """
    row = torch.randint(shape[0], (nnz,), device=dev)
    col = torch.randint(shape[1], (nnz,), device=dev)
    val = torch.randn(nnz, device=dev, requires_grad=True)
    # Vectorized column histogram + cumsum builds the CSC indptr; this
    # replaces the original per-element Python loop, producing identical
    # values.
    indptr = torch.zeros(shape[1] + 1, device=dev, dtype=torch.int64)
    indptr[1:] = torch.cumsum(torch.bincount(col, minlength=shape[1]), 0)
    _, col_sorted_idx = torch.sort(col)
    indices = row[col_sorted_idx]
    # NOTE(review): ``val`` is left in sampling order, matching the original.
    return from_csc(indptr, indices, val, shape=shape)


def sparse_matrix_to_dense(A: "SparseMatrix"):
    """Materialize ``A`` as an independent dense leaf tensor requiring grad."""
    dense = A.to_dense().clone().detach()
    dense.requires_grad_()
    return dense


def sparse_matrix_to_torch_sparse(A: "SparseMatrix", val=None):
    """Convert a SparseMatrix into a coalesced ``torch.sparse_coo_tensor``.

    Args:
        A: source sparse matrix (provides ``coo()``, ``shape``, ``val``).
        val: optional replacement nonzero values; defaults to ``A.val``.

    Returns:
        A coalesced sparse COO tensor with ``requires_grad`` enabled.
    """
    row, col = A.coo()
    edge_index = torch.cat((row.unsqueeze(0), col.unsqueeze(0)), 0)
    shape = A.shape
    if val is None:
        val = A.val
    val = val.clone().detach()
    # Fix: derive the trailing dense dimension from the effective ``val``,
    # not from ``A.val`` — they can differ when a replacement is supplied.
    if val.dim() > 1:
        shape += (val.shape[-1],)
    ret = torch.sparse_coo_tensor(edge_index, val, shape).coalesce()
    ret.requires_grad_()
    return ret


def dense_mask(dense, sparse):
    """Zero out entries of ``dense`` outside the sparsity pattern of ``sparse``.

    Args:
        dense: dense tensor indexable by the (row, col) pairs of ``sparse``.
        sparse: object exposing ``coo()`` -> ``(row, col)`` index tensors.

    Returns:
        A new tensor like ``dense`` where positions not present in ``sparse``
        are zero.
    """
    row, col = sparse.coo()
    masked = torch.zeros_like(dense)
    # One vectorized gather/scatter instead of a per-nonzero Python loop.
    masked[row, col] = dense[row, col]
    return masked