Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
OpenPCDet
Commits
19bf91e3
"tools/vscode:/vscode.git/clone" did not exist on "8cacccec11db6f59bf6934600c9a175dae254806"
Commit
19bf91e3
authored
Jun 18, 2022
by
Shaoshuai Shi
Browse files
refactor/rename some parts of the codes
parent
5666ea67
Changes
7
Show whitespace changes
Inline
Side-by-side
Showing
7 changed files
with
71 additions
and
57 deletions
+71
-57
pcdet/models/backbones_3d/focal_sparse_conv/focal_sparse_conv.py
...odels/backbones_3d/focal_sparse_conv/focal_sparse_conv.py
+2
-3
pcdet/models/backbones_3d/focal_sparse_conv/focal_sparse_utils.py
...dels/backbones_3d/focal_sparse_conv/focal_sparse_utils.py
+0
-0
pcdet/models/backbones_3d/spconv_backbone_focal.py
pcdet/models/backbones_3d/spconv_backbone_focal.py
+13
-1
pcdet/models/detectors/pv_rcnn.py
pcdet/models/detectors/pv_rcnn.py
+5
-3
pcdet/models/detectors/voxel_rcnn.py
pcdet/models/detectors/voxel_rcnn.py
+5
-4
pcdet/utils/box2d_utils.py
pcdet/utils/box2d_utils.py
+0
-46
pcdet/utils/box_utils.py
pcdet/utils/box_utils.py
+46
-0
No files found.
pcdet/models/backbones_3d/focal_sparse_conv/focal_sparse_conv.py
View file @
19bf91e3
...
...
@@ -2,7 +2,7 @@ import torch
import
torch.nn
as
nn
import
spconv.pytorch
as
spconv
from
pcdet.ops.roiaware_pool3d.roiaware_pool3d_utils
import
points_in_boxes_gpu
from
pcdet.models.backbones_3d.focal_sparse_conv.utils
import
split_voxels
,
check_repeat
,
FocalLoss
from
pcdet.models.backbones_3d.focal_sparse_conv.
focal_sparse_
utils
import
split_voxels
,
check_repeat
,
FocalLoss
from
pcdet.utils
import
common_utils
...
...
@@ -221,5 +221,4 @@ class FocalSparseConv(spconv.SparseModule):
out
=
out
.
replace_feature
(
self
.
bn1
(
out
.
features
))
out
=
out
.
replace_feature
(
self
.
relu
(
out
.
features
))
batch_dict
[
'loss_box_of_pts'
]
+=
loss_box_of_pts
return
out
,
batch_dict
return
out
,
batch_dict
,
loss_box_of_pts
pcdet/models/backbones_3d/focal_sparse_conv/utils.py
→
pcdet/models/backbones_3d/focal_sparse_conv/
focal_sparse_
utils.py
View file @
19bf91e3
File moved
pcdet/models/backbones_3d/spconv_backbone_focal.py
View file @
19bf91e3
...
...
@@ -196,6 +196,15 @@ class VoxelBackBone8xFocal(nn.Module):
'x_conv4'
:
64
}
self
.
forward_ret_dict
=
{}
def get_loss(self, tb_dict=None):
    """Fetch the focal-sparse-conv box loss accumulated during forward().

    Args:
        tb_dict: optional dict of tensorboard scalars; a fresh dict is
            created when None is given.

    Returns:
        (loss, tb_dict): the loss tensor stored under
        ``forward_ret_dict['loss_box_of_pts']`` and the tb_dict with its
        scalar value logged under the same key.
    """
    box_loss = self.forward_ret_dict['loss_box_of_pts']
    tb_dict = {} if tb_dict is None else tb_dict
    # Log the plain Python scalar for tensorboard; return the tensor for backprop.
    tb_dict['loss_box_of_pts'] = box_loss.item()
    return box_loss, tb_dict
def
forward
(
self
,
batch_dict
):
"""
Args:
...
...
@@ -221,9 +230,12 @@ class VoxelBackBone8xFocal(nn.Module):
x
=
self
.
conv_input
(
input_sp_tensor
)
x_conv1
,
batch_dict
=
self
.
conv1
(
x
,
batch_dict
)
loss_box_of_pts
=
0
if
self
.
use_img
:
x_image
=
self
.
semseg
(
batch_dict
[
'images'
])[
'layer1_feat2d'
]
x_conv1
,
batch_dict
=
self
.
conv_focal_multimodal
(
x_conv1
,
batch_dict
,
x_image
)
x_conv1
,
batch_dict
,
loss_box_of_pts
=
self
.
conv_focal_multimodal
(
x_conv1
,
batch_dict
,
x_image
)
self
.
forward_ret_dict
[
'loss_box_of_pts'
]
=
loss_box_of_pts
x_conv2
,
batch_dict
=
self
.
conv2
(
x_conv1
,
batch_dict
)
x_conv3
,
batch_dict
=
self
.
conv3
(
x_conv2
,
batch_dict
)
...
...
pcdet/models/detectors/pv_rcnn.py
View file @
19bf91e3
...
...
@@ -12,9 +12,6 @@ class PVRCNN(Detector3DTemplate):
if
self
.
training
:
loss
,
tb_dict
,
disp_dict
=
self
.
get_training_loss
()
if
'loss_box_of_pts'
in
batch_dict
:
loss
+=
batch_dict
[
'loss_box_of_pts'
]
tb_dict
[
'loss_box_of_pts'
]
=
batch_dict
[
'loss_box_of_pts'
]
ret_dict
=
{
'loss'
:
loss
...
...
@@ -31,4 +28,9 @@ class PVRCNN(Detector3DTemplate):
loss_rcnn
,
tb_dict
=
self
.
roi_head
.
get_loss
(
tb_dict
)
loss
=
loss_rpn
+
loss_point
+
loss_rcnn
if
hasattr
(
self
.
backbone_3d
,
'get_loss'
):
loss_backbone3d
,
tb_dict
=
self
.
backbone_3d
.
get_loss
(
tb_dict
)
loss
+=
loss_backbone3d
return
loss
,
tb_dict
,
disp_dict
pcdet/models/detectors/voxel_rcnn.py
View file @
19bf91e3
...
...
@@ -13,10 +13,6 @@ class VoxelRCNN(Detector3DTemplate):
if
self
.
training
:
loss
,
tb_dict
,
disp_dict
=
self
.
get_training_loss
()
if
'loss_box_of_pts'
in
batch_dict
:
loss
+=
batch_dict
[
'loss_box_of_pts'
]
tb_dict
[
'loss_box_of_pts'
]
=
batch_dict
[
'loss_box_of_pts'
]
ret_dict
=
{
'loss'
:
loss
}
...
...
@@ -33,4 +29,9 @@ class VoxelRCNN(Detector3DTemplate):
loss_rcnn
,
tb_dict
=
self
.
roi_head
.
get_loss
(
tb_dict
)
loss
=
loss
+
loss_rpn
+
loss_rcnn
if
hasattr
(
self
.
backbone_3d
,
'get_loss'
):
loss_backbone3d
,
tb_dict
=
self
.
backbone_3d
.
get_loss
(
tb_dict
)
loss
+=
loss_backbone3d
return
loss
,
tb_dict
,
disp_dict
pcdet/utils/box2d_utils.py
deleted
100755 → 0
View file @
5666ea67
import
torch
def area(box) -> torch.Tensor:
    """
    Computes the area of all the boxes.

    Returns:
        torch.Tensor: a vector with areas of each box.
    """
    # Rows are (xmin, ymin, xmax, ymax).
    widths = box[:, 2] - box[:, 0]
    heights = box[:, 3] - box[:, 1]
    return widths * heights


# implementation from https://github.com/kuangliu/torchcv/blob/master/torchcv/utils/box.py
# with slight modifications
def pairwise_iou(boxes1, boxes2) -> torch.Tensor:
    """
    Given two lists of boxes of size N and M,
    compute the IoU (intersection over union)
    between __all__ N x M pairs of boxes.
    The box order must be (xmin, ymin, xmax, ymax).

    Args:
        boxes1,boxes2 (Boxes): two `Boxes`. Contains N & M boxes, respectively.

    Returns:
        Tensor: IoU, sized [N,M].
    """
    areas1 = area(boxes1)
    areas2 = area(boxes2)

    # Broadcast N boxes against M boxes to get pairwise overlap extents: [N, M, 2]
    top_left = torch.max(boxes1[:, None, :2], boxes2[:, :2])
    bottom_right = torch.min(boxes1[:, None, 2:], boxes2[:, 2:])
    wh = (bottom_right - top_left).clamp(min=0)  # [N, M, 2]
    inter = wh[..., 0] * wh[..., 1]  # [N, M]

    union = areas1[:, None] + areas2 - inter
    # Zero IoU wherever the pair does not overlap (also guards 0/0 on empty boxes).
    zero = torch.zeros(1, dtype=inter.dtype, device=inter.device)
    return torch.where(inter > 0, inter / union, zero)
\ No newline at end of file
pcdet/utils/box_utils.py
View file @
19bf91e3
...
...
@@ -333,3 +333,49 @@ def boxes3d_nearest_bev_iou(boxes_a, boxes_b):
boxes_bev_b
=
boxes3d_lidar_to_aligned_bev_boxes
(
boxes_b
)
return
boxes_iou_normal
(
boxes_bev_a
,
boxes_bev_b
)
def area(box) -> torch.Tensor:
    """
    Computes the area of all the boxes.

    Returns:
        torch.Tensor: a vector with areas of each box.
    """
    # (xmax - xmin) * (ymax - ymin) for every row of (xmin, ymin, xmax, ymax).
    return (box[:, 2] - box[:, 0]) * (box[:, 3] - box[:, 1])


# implementation from https://github.com/kuangliu/torchcv/blob/master/torchcv/utils/box.py
# with slight modifications
def pairwise_iou(boxes1, boxes2) -> torch.Tensor:
    """
    Given two lists of boxes of size N and M,
    compute the IoU (intersection over union)
    between __all__ N x M pairs of boxes.
    The box order must be (xmin, ymin, xmax, ymax).

    Args:
        boxes1,boxes2 (Boxes): two `Boxes`. Contains N & M boxes, respectively.

    Returns:
        Tensor: IoU, sized [N,M].
    """
    area_a = area(boxes1)
    area_b = area(boxes2)

    # Intersection rectangle for every (i, j) pair via broadcasting: [N, M, 2]
    overlap_min = torch.max(boxes1[:, None, :2], boxes2[:, :2])
    overlap_max = torch.min(boxes1[:, None, 2:], boxes2[:, 2:])
    extent = torch.clamp(overlap_max - overlap_min, min=0)  # [N, M, 2]
    inter = extent.prod(dim=2)  # [N, M]

    union = area_a[:, None] + area_b - inter
    # Non-overlapping (or empty-box) pairs get exactly zero IoU.
    no_overlap = torch.zeros(1, dtype=inter.dtype, device=inter.device)
    return torch.where(inter > 0, inter / union, no_overlap)
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment