Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
OpenPCDet
Commits
19de178d
Commit
19de178d
authored
Nov 04, 2021
by
acivgin1
Browse files
switch to replace_feature method
parent
3817a135
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
13 additions
and
13 deletions
+13
-13
pcdet/models/backbones_3d/spconv_backbone.py
pcdet/models/backbones_3d/spconv_backbone.py
+5
-5
pcdet/models/backbones_3d/spconv_unet.py
pcdet/models/backbones_3d/spconv_unet.py
+8
-8
No files found.
pcdet/models/backbones_3d/spconv_backbone.py
View file @
19de178d
...
@@ -50,17 +50,17 @@ class SparseBasicBlock(spconv.SparseModule):
...
@@ -50,17 +50,17 @@ class SparseBasicBlock(spconv.SparseModule):
identity
=
x
identity
=
x
out
=
self
.
conv1
(
x
)
out
=
self
.
conv1
(
x
)
out
.
feature
s
=
self
.
bn1
(
out
.
features
)
out
=
out
.
replace_
feature
(
self
.
bn1
(
out
.
features
)
)
out
.
feature
s
=
self
.
relu
(
out
.
features
)
out
=
out
.
replace_
feature
(
self
.
relu
(
out
.
features
)
)
out
=
self
.
conv2
(
out
)
out
=
self
.
conv2
(
out
)
out
.
feature
s
=
self
.
bn2
(
out
.
features
)
out
=
out
.
replace_
feature
(
self
.
bn2
(
out
.
features
)
)
if
self
.
downsample
is
not
None
:
if
self
.
downsample
is
not
None
:
identity
=
self
.
downsample
(
x
)
identity
=
self
.
downsample
(
x
)
out
.
features
+
=
identity
.
features
out
=
out
.
replace_feature
(
out
.
features
+
identity
.
features
)
out
.
feature
s
=
self
.
relu
(
out
.
features
)
out
=
out
.
replace_
feature
(
self
.
relu
(
out
.
features
)
)
return
out
return
out
...
...
pcdet/models/backbones_3d/spconv_unet.py
View file @
19de178d
...
@@ -31,17 +31,17 @@ class SparseBasicBlock(spconv.SparseModule):
...
@@ -31,17 +31,17 @@ class SparseBasicBlock(spconv.SparseModule):
assert
x
.
features
.
dim
()
==
2
,
'x.features.dim()=%d'
%
x
.
features
.
dim
()
assert
x
.
features
.
dim
()
==
2
,
'x.features.dim()=%d'
%
x
.
features
.
dim
()
out
=
self
.
conv1
(
x
)
out
=
self
.
conv1
(
x
)
out
.
feature
s
=
self
.
bn1
(
out
.
features
)
out
=
out
.
replace_
feature
(
self
.
bn1
(
out
.
features
)
)
out
.
feature
s
=
self
.
relu
(
out
.
features
)
out
=
out
.
replace_
feature
(
self
.
relu
(
out
.
features
)
)
out
=
self
.
conv2
(
out
)
out
=
self
.
conv2
(
out
)
out
.
feature
s
=
self
.
bn2
(
out
.
features
)
out
=
out
.
replace_
feature
(
self
.
bn2
(
out
.
features
)
)
if
self
.
downsample
is
not
None
:
if
self
.
downsample
is
not
None
:
identity
=
self
.
downsample
(
x
)
identity
=
self
.
downsample
(
x
)
out
.
features
+
=
identity
out
=
out
.
replace_feature
(
out
.
features
+
identity
)
out
.
feature
s
=
self
.
relu
(
out
.
features
)
out
=
out
.
replace_
feature
(
self
.
relu
(
out
.
features
)
)
return
out
return
out
...
@@ -134,10 +134,10 @@ class UNetV2(nn.Module):
...
@@ -134,10 +134,10 @@ class UNetV2(nn.Module):
def UR_block_forward(self, x_lateral, x_bottom, conv_t, conv_m, conv_inv):
    """Upsample-refinement block of the UNet decoder.

    Fuses a lateral (skip-connection) sparse tensor with the bottom-up
    sparse tensor, then refines and inverse-convolves the result.

    Args:
        x_lateral: lateral sparse tensor from the encoder (SparseConvTensor).
        x_bottom: bottom-up sparse tensor from the previous decoder stage.
        conv_t: transform conv applied to the lateral tensor.
        conv_m: merge conv applied to the concatenated features.
        conv_inv: inverse (upsampling) conv producing the block output.

    Returns:
        The refined, upsampled sparse tensor.
    """
    x_trans = conv_t(x_lateral)
    x = x_trans
    # Concatenate bottom-up and transformed-lateral features channel-wise;
    # replace_feature keeps indices/spatial metadata and swaps the feature matrix.
    fused = torch.cat((x_bottom.features, x_trans.features), dim=1)
    x = x.replace_feature(fused)
    x_m = conv_m(x)
    # Reduce the concatenated channels back down to conv_m's output width,
    # then add the merged features as a residual.
    x = self.channel_reduction(x, x_m.features.shape[1])
    x = x.replace_feature(x_m.features + x.features)
    x = conv_inv(x)
    return x
...
@@ -155,7 +155,7 @@ class UNetV2(nn.Module):
...
@@ -155,7 +155,7 @@ class UNetV2(nn.Module):
n
,
in_channels
=
features
.
shape
n
,
in_channels
=
features
.
shape
assert
(
in_channels
%
out_channels
==
0
)
and
(
in_channels
>=
out_channels
)
assert
(
in_channels
%
out_channels
==
0
)
and
(
in_channels
>=
out_channels
)
x
.
feature
s
=
features
.
view
(
n
,
out_channels
,
-
1
).
sum
(
dim
=
2
)
x
=
x
.
replace_
feature
(
features
.
view
(
n
,
out_channels
,
-
1
).
sum
(
dim
=
2
)
)
return
x
return
x
def
forward
(
self
,
batch_dict
):
def
forward
(
self
,
batch_dict
):
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment