utils.py
import numpy as np
import scipy.sparse as sparse
import torch
import torch.nn as nn
import dgl


class NormalizationLayer(nn.Module):
    def __init__(self, mean, std):
        super().__init__()
        self.mean = mean
        self.std = std

    # mean and std are expected to be scalars
    def normalize(self, x):
        return (x - self.mean) / self.std

    def denormalize(self, x):
        return x * self.std + self.mean
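
# Example usage (a minimal sketch; the numbers below are made up, not taken
# from this repository):
#
#     layer = NormalizationLayer(mean=54.0, std=19.0)
#     x_norm = layer.normalize(x)          # x: torch.Tensor of raw values
#     x_back = layer.denormalize(x_norm)   # recovers the original scale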


def masked_mae_loss(y_pred, y_true):
    # Mask out entries where the ground truth is zero (treated as missing) and
    # rescale so the loss is effectively averaged over the observed entries only.
    mask = (y_true != 0).float()
    mask /= mask.mean()
    loss = torch.abs(y_pred - y_true)
    loss = loss * mask
    # trick for nans: https://discuss.pytorch.org/t/how-to-set-nan-in-tensor-to-0/3918/3
    loss[loss != loss] = 0
    return loss.mean()
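
# Example (a minimal sketch with made-up tensors, not from this repository):
#
#     y_true = torch.tensor([[2.0, 0.0], [4.0, 1.0]])  # zeros count as missing
#     y_pred = torch.tensor([[1.0, 5.0], [4.0, 2.0]])
#     masked_mae_loss(y_pred, y_true)  # mean absolute error over non-zero entries only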


def get_learning_rate(optimizer):
    # Return the learning rate of the optimizer's first parameter group.
    for param_group in optimizer.param_groups:
        return param_group["lr"]