sampler.py

import numpy as np
import torch as th

import dgl


class Sampler:
    def __init__(
        self, graph, walk_length, num_walks, window_size, num_negative
    ):
        self.graph = graph
        self.walk_length = walk_length
        self.num_walks = num_walks
        self.window_size = window_size
        self.num_negative = num_negative
        self.node_weights = self.compute_node_sample_weight()

    def sample(self, batch, sku_info):
        """
        Given a batch of target nodes, sample positive
        pairs and negative pairs from the graph.
        """
        batch = np.repeat(batch, self.num_walks)

        pos_pairs = self.generate_pos_pairs(batch)
        neg_pairs = self.generate_neg_pairs(pos_pairs)

        # look up sku side information by node id
        srcs, dsts, labels = [], [], []
        for pair in pos_pairs + neg_pairs:
            src, dst, label = pair
            src_info = sku_info[src]
            dst_info = sku_info[dst]

            srcs.append(src_info)
            dsts.append(dst_info)
            labels.append(label)

        return th.tensor(srcs), th.tensor(dsts), th.tensor(labels)

    def filter_padding(self, traces):
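        """
        Drop the -1 entries that dgl.sampling.random_walk uses to pad
        walks that terminate early.
        """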
        for i in range(len(traces)):
            traces[i] = [x for x in traces[i] if x != -1]

    def generate_pos_pairs(self, nodes):
        """
        For the walk [1, 2, 3, 4] and center node 2,
        window_size=1 generates the positive pairs
            (2, 1) and (2, 3)
        """
        # weighted random walks starting from each seed node
        traces, types = dgl.sampling.random_walk(
            g=self.graph, nodes=nodes, length=self.walk_length, prob="weight"
        )
        traces = traces.tolist()
        self.filter_padding(traces)

        # skip-gram: pair each center node with its context within the window
        pairs = []
        for trace in traces:
            for i in range(len(trace)):
                center = trace[i]
                left = max(0, i - self.window_size)
                right = min(len(trace), i + self.window_size + 1)
                pairs.extend([[center, x, 1] for x in trace[left:i]])
                pairs.extend([[center, x, 1] for x in trace[i + 1 : right]])

        return pairs

    def compute_node_sample_weight(self):
        """
        Use node in-degree as the negative-sampling weight.
        """
        return self.graph.in_degrees().float()

    def generate_neg_pairs(self, pos_pairs):
        """
        Sample negative dst nodes in proportion to node in-degree:
        frequently visited nodes have a larger chance of being
        sampled as the negative node.
        """
        # sample `self.num_negative` negative dst nodes
        # for each positive pair's src node
        negs = th.multinomial(
            self.node_weights,
            len(pos_pairs) * self.num_negative,
            replacement=True,
        ).tolist()

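        # repeat each positive pair's src node `self.num_negative` times
        # so it lines up with the sampled negative dst nodes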
        tar = np.repeat([pair[0] for pair in pos_pairs], self.num_negative)
        assert len(tar) == len(negs)
        neg_pairs = [[x, y, 0] for x, y in zip(tar, negs)]

        return neg_pairs
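

if __name__ == "__main__":
    # Illustrative usage sketch, not part of the original module: build a tiny
    # toy graph with a "weight" edge feature and a made-up `sku_info` table,
    # then draw one batch of (src, dst, label) training pairs. The feature
    # layout of `sku_info` below is hypothetical.
    u = th.tensor([0, 1, 2, 3])
    v = th.tensor([1, 2, 3, 0])
    g = dgl.graph((u, v), num_nodes=4)
    g.edata["weight"] = th.ones(g.num_edges())

    # hypothetical side information per node id, e.g. [sku_id, cate_id, brand_id]
    sku_info = {i: [i, i % 2, i % 3] for i in range(4)}

    sampler = Sampler(
        graph=g, walk_length=3, num_walks=2, window_size=1, num_negative=2
    )
    srcs, dsts, labels = sampler.sample(th.tensor([0, 1]), sku_info)
    print(srcs.shape, dsts.shape, labels.shape)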