import torch
from torch_unique import unique

from .._ext import ffi


def get_func(name, tensor):
    # Resolve the compiled FFI symbol 'cluster_<name>' ('cluster_<name>_cuda' for CUDA tensors).
    cuda = '_cuda' if tensor.is_cuda else ''
    return getattr(ffi, 'cluster_{}{}'.format(name, cuda))


def get_dynamic_func(name, tensor):
    # Resolve the typed FFI symbol 'cluster_<name>_[cuda_]<Type>', where <Type> is the
    # tensor's scalar type (e.g. 'Float' for a FloatTensor).
    typename = type(tensor).__name__.replace('Tensor', '')
    cuda = 'cuda_' if tensor.is_cuda else ''
    return getattr(ffi, 'cluster_{}_{}{}'.format(name, cuda, typename))


def get_type(max, cuda):
    # Smallest integer tensor type able to hold values up to `max` (CUDA variant if requested).
    if max <= 255:
        return torch.cuda.ByteTensor if cuda else torch.ByteTensor
    elif max <= 32767:  # pragma: no cover
        return torch.cuda.ShortTensor if cuda else torch.ShortTensor
    elif max <= 2147483647:  # pragma: no cover
        return torch.cuda.IntTensor if cuda else torch.IntTensor
    else:  # pragma: no cover
        return torch.cuda.LongTensor if cuda else torch.LongTensor


def consecutive(tensor):
    # Relabel the values of `tensor` with consecutive indices 0..num_unique - 1 and
    # return the relabeled tensor together with the sorted unique values.
    size = tensor.size()
    u = unique(tensor.view(-1))
    length = u[-1] + 1  # lookup table must be indexable by the largest original value
    num = u.size(0)  # number of distinct values
    tensor_type = get_type(num, tensor.is_cuda)  # smallest dtype that can hold 0..num - 1
    arg = tensor_type(length)
    arg[u] = torch.arange(0, num, out=tensor_type(num))
    tensor = arg[tensor.view(-1)]  # map every entry to its new consecutive index
    return tensor.view(size).long(), u
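

# A minimal usage sketch (illustration only, not part of the original module); the
# values below are worked out by hand and assume `torch_unique` and the compiled
# `_ext` extension are importable:
#
#     cluster = torch.LongTensor([7, 3, 7, 9])
#     cluster, u = consecutive(cluster)
#     # cluster -> [1, 0, 1, 2]  (relabeled, returned as a LongTensor)
#     # u       -> [3, 7, 9]     (sorted unique values of the input)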