import functools

import torch
import torch.library

# Ensure that torch.ops.torchvision is visible
import torchvision.extension  # noqa: F401


@functools.cache
def get_meta_lib():
    """Return the process-wide library handle used to register Meta ("fake
    tensor") kernels under the ``torchvision`` namespace.

    Cached so that every ``register_meta`` call shares a single
    ``torch.library.Library`` object instead of constructing a new one.
    """
    lib = torch.library.Library("torchvision", "IMPL", "Meta")
    return lib


def register_meta(op_name, overload_name="default"):
    """Decorator factory: register the wrapped function as the Meta-device
    (shape-inference) kernel for ``torch.ops.torchvision.<op_name>.<overload_name>``.

    When the compiled torchvision C++ ops are not available, registration is
    silently skipped; in either case the decorated function is returned
    unchanged so it can still be called directly.
    """

    def wrapper(fn):
        # Only register if the native extension actually defines the op.
        if torchvision.extension._has_ops():
            packet = getattr(torch.ops.torchvision, op_name)
            overload = getattr(packet, overload_name)
            get_meta_lib().impl(overload, fn)
        return fn

    return wrapper


@register_meta("roi_align")
def meta_roi_align(input, rois, spatial_scale, pooled_height, pooled_width, sampling_ratio, aligned):
    """Meta (shape-only) kernel for ``roi_align``: validates the inputs and
    allocates an output of the correct shape without computing any values."""
    torch._check(rois.size(1) == 5, lambda: "rois must have shape as Tensor[K, 5]")
    torch._check(
        input.dtype == rois.dtype,
        lambda: (
            "Expected tensor for input to have the same type as tensor for rois; "
            f"but type {input.dtype} does not equal {rois.dtype}"
        ),
    )
    # Unpacking four values doubles as an assertion that `input` is 4-D (N, C, H, W).
    _batch, num_channels, _height, _width = input.size()
    output_shape = (rois.size(0), num_channels, pooled_height, pooled_width)
    return input.new_empty(output_shape)


@register_meta("_roi_align_backward")
def meta_roi_align_backward(
    grad, rois, spatial_scale, pooled_height, pooled_width, batch_size, channels, height, width, sampling_ratio, aligned
):
    """Meta (shape-only) kernel for the ``roi_align`` backward pass: checks
    dtype agreement and allocates the gradient buffer w.r.t. the forward input."""
    torch._check(
        grad.dtype == rois.dtype,
        lambda: (
            "Expected tensor for grad to have the same type as tensor for rois; "
            f"but type {grad.dtype} does not equal {rois.dtype}"
        ),
    )
    # The input gradient has the original input's (N, C, H, W) shape,
    # reconstructed here from the explicitly passed sizes.
    grad_input_shape = (batch_size, channels, height, width)
    return grad.new_empty(grad_input_shape)