# Copyright (c) OpenMMLab. All rights reserved.
from typing import List, Tuple

from mmcv.cnn.bricks import ConvModule
from torch import Tensor
from torch import nn as nn

from mmdet3d.models.layers import PointFPModule
from mmdet3d.registry import MODELS
from mmdet3d.utils.typing import ConfigType
from .decode_head import Base3DDecodeHead


@MODELS.register_module()
class PointNet2Head(Base3DDecodeHead):
    r"""PointNet2 decoder head.

    Decoder head used in `PointNet++ <https://arxiv.org/abs/1706.02413>`_.
    Refer to the `official code <https://github.com/charlesq34/pointnet2>`_.

    Args:
        fp_channels (tuple[tuple[int]]): Tuple of mlp channels in FP modules.
        fp_norm_cfg (dict): Config of norm layers used in FP modules.
            Default: dict(type='BN2d').
    """

    def __init__(self,
                 fp_channels: Tuple[Tuple[int]] = ((768, 256, 256),
                                                   (384, 256, 256),
                                                   (320, 256, 128),
                                                   (128, 128, 128, 128)),
                 fp_norm_cfg: ConfigType = dict(type='BN2d'),
                 **kwargs) -> None:
        super(PointNet2Head, self).__init__(**kwargs)

        self.num_fp = len(fp_channels)
        self.FP_modules = nn.ModuleList()
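        # Each FP module lifts features from a sparser SA level onto the next
        # denser one (three-nearest-neighbor interpolation followed by a
        # shared MLP with the channel widths given in `fp_channels`).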
        for cur_fp_mlps in fp_channels:
            self.FP_modules.append(
                PointFPModule(mlp_channels=cur_fp_mlps, norm_cfg=fp_norm_cfg))

        # https://github.com/charlesq34/pointnet2/blob/master/models/pointnet2_sem_seg.py#L40
        self.pre_seg_conv = ConvModule(
            fp_channels[-1][-1],
            self.channels,
            kernel_size=1,
            bias=True,
            conv_cfg=self.conv_cfg,
            norm_cfg=self.norm_cfg,
            act_cfg=self.act_cfg)

    def _extract_input(
            self, feat_dict: dict) -> Tuple[List[Tensor], List[Tensor]]:
        """Extract inputs from features dictionary.

        Args:
            feat_dict (dict): Feature dict from backbone.

        Returns:
            list[torch.Tensor]: Coordinates of multiple levels of points.
            list[torch.Tensor]: Features of multiple levels of points.
        """
        sa_xyz = feat_dict['sa_xyz']
        sa_features = feat_dict['sa_features']
        assert len(sa_xyz) == len(sa_features)

        return sa_xyz, sa_features

    def forward(self, feat_dict: dict) -> Tensor:
        """Forward pass.

        Args:
            feat_dict (dict): Feature dict from backbone.

        Returns:
            torch.Tensor: Segmentation map of shape [B, num_classes, N].
        """
        sa_xyz, sa_features = self._extract_input(feat_dict)

        # https://github.com/charlesq34/pointnet2/blob/master/models/pointnet2_sem_seg.py#L24
        sa_features[0] = None

        fp_feature = sa_features[-1]

        for i in range(self.num_fp):
            # consume the points in a bottom-up manner
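            # The i-th FP module propagates the current fp_feature from the
            # coarser level sa_xyz[-(i + 1)] onto the denser level
            # sa_xyz[-(i + 2)], concatenating that level's own SA features as
            # a skip connection before the MLP.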
            fp_feature = self.FP_modules[i](sa_xyz[-(i + 2)], sa_xyz[-(i + 1)],
                                            sa_features[-(i + 2)], fp_feature)
        output = self.pre_seg_conv(fp_feature)
        output = self.cls_seg(output)

        return output
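

# A minimal smoke-test sketch, not part of the original module. It assumes
# that Base3DDecodeHead accepts `num_classes` and `channels` keyword
# arguments and that the backbone provides five SA levels whose feature
# widths (64, 128, 256, 512) match the default `fp_channels`; the class
# count, point counts and feature dims below are illustrative only. Some
# mmcv builds implement the FP interpolation ops on GPU only, hence the
# optional move to CUDA.
if __name__ == '__main__':
    import torch

    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    head = PointNet2Head(num_classes=20, channels=128).to(device)

    batch_size = 2
    num_points = (4096, 1024, 256, 64, 16)  # points per SA level
    feat_dims = (6, 64, 128, 256, 512)  # feature channels per SA level
    feat_dict = dict(
        sa_xyz=[
            torch.rand(batch_size, n, 3, device=device) for n in num_points
        ],
        sa_features=[
            torch.rand(batch_size, c, n, device=device)
            for c, n in zip(feat_dims, num_points)
        ])

    seg_logits = head(feat_dict)
    # Expected shape: [batch_size, num_classes, num_points[0]]
    print(seg_logits.shape)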