import numpy as np
import torch

from dgl.sparse import (
    create_from_coo,
    create_from_csc,
    create_from_csr,
    SparseMatrix,
)

11
12
13
np.random.seed(42)
torch.random.manual_seed(42)

14
15
16
17
18
19
20

def clone_detach_and_grad(t):
    """Return an independent copy of ``t`` that tracks gradients.

    The copy shares no autograd history with ``t``; it is a fresh leaf
    tensor with ``requires_grad`` enabled.
    """
    copied = t.clone().detach()
    copied.requires_grad_(True)
    return copied


def rand_coo(shape, nnz, dev, nz_dim=None):
    """Create a random COO sparse matrix with no duplicate entries.

    Args:
        shape: (num_rows, num_cols) of the matrix.
        nnz: number of nonzero entries.
        dev: torch device for all created tensors.
        nz_dim: optional trailing dimension of the nonzero values.
    """
    # Sampling flat positions without replacement guarantees that no
    # (row, col) coordinate appears twice.
    flat_ids = np.random.choice(shape[0] * shape[1], nnz, replace=False)
    flat_ids = torch.tensor(flat_ids, device=dev).long()
    rows = torch.div(flat_ids, shape[1], rounding_mode="floor")
    cols = flat_ids % shape[1]
    value_shape = (nnz,) if nz_dim is None else (nnz, nz_dim)
    values = torch.randn(value_shape, device=dev, requires_grad=True)
    return create_from_coo(rows, cols, values, shape)


def rand_csr(shape, nnz, dev, nz_dim=None):
    """Create a random CSR sparse matrix with no duplicate entries.

    Args:
        shape: (num_rows, num_cols) of the matrix.
        nnz: number of nonzero entries.
        dev: torch device for all created tensors.
        nz_dim: optional trailing dimension of the nonzero values.

    Returns:
        SparseMatrix backed by CSR storage.
    """
    # Create a sparse matrix without duplicate entries: sample flat
    # positions without replacement, then split into (row, col).
    nnzid = np.random.choice(shape[0] * shape[1], nnz, replace=False)
    nnzid = torch.tensor(nnzid, device=dev).long()
    row = torch.div(nnzid, shape[1], rounding_mode="floor")
    col = nnzid % shape[1]
    if nz_dim is None:
        val = torch.randn(nnz, device=dev, requires_grad=True)
    else:
        val = torch.randn(nnz, nz_dim, device=dev, requires_grad=True)
    # Vectorized indptr: per-row nonzero counts followed by an exclusive
    # prefix sum. Replaces an O(nnz) Python loop of scalar tensor updates.
    counts = torch.bincount(row, minlength=shape[0])
    indptr = torch.zeros(shape[0] + 1, device=dev, dtype=torch.int64)
    indptr[1:] = torch.cumsum(counts, 0)
    # Group column indices by row; val stays in sampled order, matching
    # the original behavior (values are i.i.d., so order is immaterial).
    indices = col[torch.argsort(row)]
    return create_from_csr(indptr, indices, val, shape=shape)


def rand_csc(shape, nnz, dev, nz_dim=None):
    """Create a random CSC sparse matrix with no duplicate entries.

    Args:
        shape: (num_rows, num_cols) of the matrix.
        nnz: number of nonzero entries.
        dev: torch device for all created tensors.
        nz_dim: optional trailing dimension of the nonzero values.

    Returns:
        SparseMatrix backed by CSC storage.
    """
    # Create a sparse matrix without duplicate entries: sample flat
    # positions without replacement, then split into (row, col).
    nnzid = np.random.choice(shape[0] * shape[1], nnz, replace=False)
    nnzid = torch.tensor(nnzid, device=dev).long()
    row = torch.div(nnzid, shape[1], rounding_mode="floor")
    col = nnzid % shape[1]
    if nz_dim is None:
        val = torch.randn(nnz, device=dev, requires_grad=True)
    else:
        val = torch.randn(nnz, nz_dim, device=dev, requires_grad=True)
    # Vectorized indptr: per-column nonzero counts followed by an
    # exclusive prefix sum. Replaces an O(nnz) Python loop of scalar
    # tensor updates.
    counts = torch.bincount(col, minlength=shape[1])
    indptr = torch.zeros(shape[1] + 1, device=dev, dtype=torch.int64)
    indptr[1:] = torch.cumsum(counts, 0)
    # Group row indices by column; val stays in sampled order, matching
    # the original behavior (values are i.i.d., so order is immaterial).
    indices = row[torch.argsort(col)]
    return create_from_csc(indptr, indices, val, shape=shape)


def rand_coo_uncoalesced(shape, nnz, dev):
    """Create a random COO sparse matrix that may contain duplicates.

    Rows and columns are sampled independently, so the same (row, col)
    coordinate can occur more than once.
    """
    rows = torch.randint(shape[0], (nnz,), device=dev)
    cols = torch.randint(shape[1], (nnz,), device=dev)
    values = torch.randn(nnz, device=dev, requires_grad=True)
    return create_from_coo(rows, cols, values, shape)


def rand_csr_uncoalesced(shape, nnz, dev):
    """Create a random CSR sparse matrix that may contain duplicates.

    Rows and columns are sampled independently, so the same (row, col)
    coordinate can occur more than once.

    Args:
        shape: (num_rows, num_cols) of the matrix.
        nnz: number of nonzero entries, counting duplicates.
        dev: torch device for all created tensors.
    """
    row = torch.randint(shape[0], (nnz,), device=dev)
    col = torch.randint(shape[1], (nnz,), device=dev)
    val = torch.randn(nnz, device=dev, requires_grad=True)
    # Vectorized indptr: per-row counts + exclusive prefix sum instead of
    # an O(nnz) Python loop of scalar tensor updates.
    counts = torch.bincount(row, minlength=shape[0])
    indptr = torch.zeros(shape[0] + 1, device=dev, dtype=torch.int64)
    indptr[1:] = torch.cumsum(counts, 0)
    indices = col[torch.argsort(row)]
    return create_from_csr(indptr, indices, val, shape=shape)


def rand_csc_uncoalesced(shape, nnz, dev):
    """Create a random CSC sparse matrix that may contain duplicates.

    Rows and columns are sampled independently, so the same (row, col)
    coordinate can occur more than once.

    Args:
        shape: (num_rows, num_cols) of the matrix.
        nnz: number of nonzero entries, counting duplicates.
        dev: torch device for all created tensors.
    """
    row = torch.randint(shape[0], (nnz,), device=dev)
    col = torch.randint(shape[1], (nnz,), device=dev)
    val = torch.randn(nnz, device=dev, requires_grad=True)
    # Vectorized indptr: per-column counts + exclusive prefix sum instead
    # of an O(nnz) Python loop of scalar tensor updates.
    counts = torch.bincount(col, minlength=shape[1])
    indptr = torch.zeros(shape[1] + 1, device=dev, dtype=torch.int64)
    indptr[1:] = torch.cumsum(counts, 0)
    indices = row[torch.argsort(col)]
    return create_from_csc(indptr, indices, val, shape=shape)


def sparse_matrix_to_dense(A: SparseMatrix):
    """Materialize ``A`` as a dense tensor that tracks gradients."""
    return clone_detach_and_grad(A.dense())


def sparse_matrix_to_torch_sparse(A: SparseMatrix, val=None):
    """Convert ``A`` into a coalesced ``torch.sparse_coo_tensor``.

    Args:
        A: source sparse matrix.
        val: optional replacement nonzero values; defaults to ``A.val``.

    Returns:
        A coalesced torch sparse tensor with ``requires_grad`` enabled.
    """
    row, col = A.coo()
    edge_index = torch.stack((row, col), dim=0)
    shape = A.shape
    if val is None:
        val = A.val
    val = val.clone().detach()
    # A trailing value dimension makes the result a hybrid sparse tensor,
    # so it is appended to the dense part of the shape.
    if A.val.dim() > 1:
        shape = shape + (A.val.shape[-1],)
    result = torch.sparse_coo_tensor(edge_index, val, shape).coalesce()
    result.requires_grad_()
    return result


def dense_mask(dense, sparse):
    """Zero out every entry of ``dense`` outside ``sparse``'s nonzeros.

    Args:
        dense: dense tensor of the same (leading) shape as ``sparse``.
        sparse: object exposing ``coo()`` -> (row, col) index tensors.

    Returns:
        A new tensor equal to ``dense`` at the nonzero coordinates of
        ``sparse`` and zero elsewhere.
    """
    row, col = sparse.coo()
    ret = torch.zeros_like(dense)
    # Advanced indexing copies all nonzero positions in one shot instead
    # of a Python-level loop over every coordinate pair.
    ret[row, col] = dense[row, col]
    return ret