misc.py 3.26 KB
Newer Older
1
2
3
4
5
6
7
8
9
10
"""
helper class that supports empty tensors on some nn functions.

Ideally, add support for empty tensors directly in PyTorch
in those functions.

This can be removed once https://github.com/pytorch/pytorch/issues/12013
is implemented
"""

11
import warnings
12
import torch
13
14
from torch import Tensor, Size
from torch.jit.annotations import List, Optional, Tuple
eellison's avatar
eellison committed
15
16


17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
class Conv2d(torch.nn.Conv2d):
    """Deprecated drop-in for ``torch.nn.Conv2d``.

    Exists only so old imports keep working; construction emits a
    ``FutureWarning`` pointing callers at the upstream class.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Warn after construction so a bad-argument TypeError surfaces first.
        message = (
            "torchvision.ops.misc.Conv2d is deprecated and will be "
            "removed in future versions, use torch.nn.Conv2d instead."
        )
        warnings.warn(message, FutureWarning)


class ConvTranspose2d(torch.nn.ConvTranspose2d):
    """Deprecated drop-in for ``torch.nn.ConvTranspose2d``.

    Exists only so old imports keep working; construction emits a
    ``FutureWarning`` pointing callers at the upstream class.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Warn after construction so a bad-argument TypeError surfaces first.
        message = (
            "torchvision.ops.misc.ConvTranspose2d is deprecated and will be "
            "removed in future versions, use torch.nn.ConvTranspose2d instead."
        )
        warnings.warn(message, FutureWarning)


class BatchNorm2d(torch.nn.BatchNorm2d):
    """Deprecated drop-in for ``torch.nn.BatchNorm2d``.

    Exists only so old imports keep working; construction emits a
    ``FutureWarning`` pointing callers at the upstream class.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Warn after construction so a bad-argument TypeError surfaces first.
        message = (
            "torchvision.ops.misc.BatchNorm2d is deprecated and will be "
            "removed in future versions, use torch.nn.BatchNorm2d instead."
        )
        warnings.warn(message, FutureWarning)


41
# Plain alias of torch.nn.functional.interpolate, re-exported here
# (presumably kept so existing `from torchvision.ops.misc import interpolate`
# call sites keep working — see the module docstring about the former
# empty-tensor workarounds).
interpolate = torch.nn.functional.interpolate
42
43
44


# This is not in nn
45
class FrozenBatchNorm2d(torch.nn.Module):
    """
    BatchNorm2d where the batch statistics and the affine parameters
    are fixed
    """

    def __init__(
        self,
        num_features: int,
        eps: float = 0.,
        n: Optional[int] = None,
    ):
        # `n` is the legacy spelling of `num_features`; honour it for
        # backward compatibility while steering callers to the new name.
        if n is not None:
            warnings.warn("`n` argument is deprecated and has been renamed `num_features`",
                          DeprecationWarning)
            num_features = n
        super().__init__()
        self.eps = eps
        # Statistics and affine terms are plain buffers (not Parameters):
        # they travel with state_dict / .to() but are never optimized.
        for name, init in (
            ("weight", torch.ones(num_features)),
            ("bias", torch.zeros(num_features)),
            ("running_mean", torch.zeros(num_features)),
            ("running_var", torch.ones(num_features)),
        ):
            self.register_buffer(name, init)

    def _load_from_state_dict(
        self,
        state_dict: dict,
        prefix: str,
        local_metadata: dict,
        strict: bool,
        missing_keys: List[str],
        unexpected_keys: List[str],
        error_msgs: List[str],
    ):
        # Checkpoints written by a regular BatchNorm2d carry a
        # `num_batches_tracked` entry this frozen variant has no buffer for;
        # drop it so strict loading does not report it as unexpected.
        state_dict.pop(prefix + 'num_batches_tracked', None)

        super()._load_from_state_dict(
            state_dict, prefix, local_metadata, strict,
            missing_keys, unexpected_keys, error_msgs)

    def forward(self, x: Tensor) -> Tensor:
        # Broadcast the per-channel buffers to NCHW up front so the whole
        # computation is a chain of elementwise ops (fuser-friendly).
        shape = (1, -1, 1, 1)
        weight = self.weight.reshape(shape)
        offset = self.bias.reshape(shape)
        var = self.running_var.reshape(shape)
        mean = self.running_mean.reshape(shape)
        scale = weight * (var + self.eps).rsqrt()
        shift = offset - mean * scale
        return x * scale + shift

    def __repr__(self) -> str:
        channels = self.weight.shape[0]
        return f"{self.__class__.__name__}({channels}, eps={self.eps})"