from collections import OrderedDict
from typing import Dict, Optional

from torch import nn


class IntermediateLayerGetter(nn.ModuleDict):
    """
    Module wrapper that returns intermediate layers from a model

    It has a strong assumption that the modules have been registered
    into the model in the same order as they are used.
    This means that one should **not** reuse the same nn.Module
    twice in the forward if you want this to work.

    Additionally, it is only able to query submodules that are directly
    assigned to the model. So if `model` is passed, `model.feature1` can
    be returned, but not `model.feature1.layer2`.

    Args:
        model (nn.Module): model on which we will extract the features
        return_layers (Dict[name, new_name]): a dict containing the names
            of the modules for which the activations will be returned as
            the key of the dict, and the value of the dict is the name
            of the returned activation (which the user can specify).

    Examples::

        >>> m = torchvision.models.resnet18(pretrained=True)
        >>> # extract layer1 and layer3, giving as names `feat1` and `feat2`
        >>> new_m = torchvision.models._utils.IntermediateLayerGetter(m,
        >>>     {'layer1': 'feat1', 'layer3': 'feat2'})
        >>> out = new_m(torch.rand(1, 3, 224, 224))
        >>> print([(k, v.shape) for k, v in out.items()])
        >>>     [('feat1', torch.Size([1, 64, 56, 56])),
        >>>      ('feat2', torch.Size([1, 256, 14, 14]))]
    """

    _version = 2
    __annotations__ = {
        "return_layers": Dict[str, str],
    }

    def __init__(self, model: nn.Module, return_layers: Dict[str, str]) -> None:
        if not set(return_layers).issubset([name for name, _ in model.named_children()]):
            raise ValueError("return_layers are not present in model")
        orig_return_layers = return_layers
        return_layers = {str(k): str(v) for k, v in return_layers.items()}
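        # Copy the model's children in registration order, stopping once every
        # layer requested in `return_layers` has been collected; any modules
        # registered after the last requested layer are dropped from the wrapper.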
        layers = OrderedDict()
        for name, module in model.named_children():
            layers[name] = module
            if name in return_layers:
                del return_layers[name]
            if not return_layers:
                break

        super().__init__(layers)
        self.return_layers = orig_return_layers

    def forward(self, x):
        out = OrderedDict()
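        # Run the input through each retained child in order, recording the
        # output of every module named in `return_layers` under its new name.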
        for name, module in self.items():
            x = module(x)
            if name in self.return_layers:
                out_name = self.return_layers[name]
                out[out_name] = x
        return out


def _make_divisible(v: float, divisor: int, min_value: Optional[int] = None) -> int:
    """
    This function is taken from the original tf repo.
    It ensures that all layers have a channel number that is divisible by ``divisor``.
    It can be seen here:
    https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py
    """
    if min_value is None:
        min_value = divisor
    new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
    # Make sure that round down does not go down by more than 10%.
    if new_v < 0.9 * v:
        new_v += divisor
    return new_v
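
# A quick illustrative example (not part of the original module), assuming the
# typical MobileNet usage with ``divisor=8``:
#   _make_divisible(37.0, 8)  -> 40  (nearest multiple of 8)
#   _make_divisible(91.0, 8)  -> 88  (nearest multiple of 8, within 10% of 91)
#   _make_divisible(10.0, 8)  -> 16  (rounding 10 down to 8 would drop >10%, so round up)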