ModelZoo / SOLOv2-pytorch · Commits

Commit ff6c19f0
Authored Apr 12, 2019 by Kai Chen

    add an argument to support building P6 from either C5 or P5

Parent: c17ed460
Showing 1 changed file with 11 additions and 8 deletions.
mmdet/models/necks/fpn.py (+11, -8)
@@ -16,6 +16,7 @@ class FPN(nn.Module):
                  start_level=0,
                  end_level=-1,
                  add_extra_convs=False,
+                 extra_convs_on_inputs=True,
                  normalize=None,
                  activation=None):
         super(FPN, self).__init__()
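For orientation, a minimal construction sketch showing where the new keyword fits. The first three positional arguments (in_channels, out_channels, num_outs) and the channel widths are assumptions for illustration; only the keyword arguments visible in the hunk above come from this diff.

```python
from mmdet.models.necks.fpn import FPN

# Illustrative configuration (channel widths assumed, ResNet-style C2-C5):
# with extra_convs_on_inputs=True the extra P6 conv reads the raw backbone
# feature C5; with False it would read the fused FPN output P5 instead.
neck = FPN(
    in_channels=[256, 512, 1024, 2048],
    out_channels=256,
    num_outs=5,
    add_extra_convs=True,
    extra_convs_on_inputs=True)
```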
@@ -38,6 +39,7 @@ class FPN(nn.Module):
         self.start_level = start_level
         self.end_level = end_level
         self.add_extra_convs = add_extra_convs
+        self.extra_convs_on_inputs = extra_convs_on_inputs

         self.lateral_convs = nn.ModuleList()
         self.fpn_convs = nn.ModuleList()
@@ -64,16 +66,14 @@ class FPN(nn.Module):
             self.lateral_convs.append(l_conv)
             self.fpn_convs.append(fpn_conv)
             # lvl_id = i - self.start_level
             # setattr(self, 'lateral_conv{}'.format(lvl_id), l_conv)
             # setattr(self, 'fpn_conv{}'.format(lvl_id), fpn_conv)

         # add extra conv layers (e.g., RetinaNet)
         extra_levels = num_outs - self.backbone_end_level + self.start_level
         if add_extra_convs and extra_levels >= 1:
             for i in range(extra_levels):
-                in_channels = (self.in_channels[self.backbone_end_level - 1]
-                               if i == 0 else out_channels)
+                if i == 0 and self.extra_convs_on_inputs:
+                    in_channels = self.in_channels[self.backbone_end_level - 1]
+                else:
+                    in_channels = out_channels
                 extra_fpn_conv = ConvModule(
                     in_channels,
                     out_channels,
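To isolate the channel-selection rule added above, a small standalone sketch (the helper name and the example channel list are hypothetical, not part of the repository): with the flag on, the first extra conv, the one that builds P6, takes the raw C5 width; otherwise every extra conv takes the FPN width.

```python
def extra_conv_in_channels(i, backbone_channels, out_channels,
                           extra_convs_on_inputs):
    """Input width of the i-th extra conv; i == 0 is the one producing P6."""
    if i == 0 and extra_convs_on_inputs:
        # P6 is built from the raw backbone feature C5.
        return backbone_channels[-1]
    # P6 built from P5, or any deeper extra level, already has out_channels.
    return out_channels


# With C5 at 2048 channels and the FPN width set to 256:
assert extra_conv_in_channels(0, [256, 512, 1024, 2048], 256, True) == 2048
assert extra_conv_in_channels(0, [256, 512, 1024, 2048], 256, False) == 256
assert extra_conv_in_channels(1, [256, 512, 1024, 2048], 256, True) == 256
```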
@@ -121,8 +121,11 @@ class FPN(nn.Module):
                     outs.append(F.max_pool2d(outs[-1], 1, stride=2))
             # add conv layers on top of original feature maps (RetinaNet)
             else:
-                orig = inputs[self.backbone_end_level - 1]
-                outs.append(self.fpn_convs[used_backbone_levels](orig))
+                if self.extra_convs_on_inputs:
+                    orig = inputs[self.backbone_end_level - 1]
+                    outs.append(self.fpn_convs[used_backbone_levels](orig))
+                else:
+                    outs.append(self.fpn_convs[used_backbone_levels](outs[-1]))
                 for i in range(used_backbone_levels + 1, self.num_outs):
                     # BUG: we should add relu before each extra conv
                     outs.append(self.fpn_convs[i](outs[-1]))
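A hedged end-to-end sketch of the forward path with the new behaviour: fake C2-C5 features go in and num_outs pyramid levels come out, the last one produced by the extra conv applied to either C5 or P5 depending on the flag. Input shapes, channel widths, and the constructor call are illustrative assumptions, not taken from this commit.

```python
import torch
from mmdet.models.necks.fpn import FPN

neck = FPN(
    in_channels=[256, 512, 1024, 2048],
    out_channels=256,
    num_outs=5,
    add_extra_convs=True,
    extra_convs_on_inputs=True)

# Fake C2-C5 feature maps at strides 4, 8, 16, 32 for a 608x608 image.
feats = [
    torch.randn(1, c, s, s)
    for c, s in zip([256, 512, 1024, 2048], [152, 76, 38, 19])
]
outs = neck(feats)
assert len(outs) == 5                          # P2-P6
assert all(o.shape[1] == 256 for o in outs)    # every level has out_channels
```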