OpenDAS / mmdetection3d / Commits / 21cb2aa6

Commit 21cb2aa6, authored Jul 09, 2020 by wangtai, committed by zhangwenwei on Jul 09, 2020.

Refine docstrings

Parent: cfc3f545
Changes: 39 (this page shows 20 changed files with 211 additions and 194 deletions, +211 −194)
Changed files on this page:

mmdet3d/core/anchor/anchor_3d_generator.py (+8 −8)
mmdet3d/core/bbox/box_np_ops.py (+45 −34)
mmdet3d/core/bbox/coders/delta_xyzwhlr_bbox_coder.py (+1 −1)
mmdet3d/core/bbox/coders/partial_bin_based_bbox_coder.py (+10 −8)
mmdet3d/core/bbox/iou_calculators/iou3d_calculator.py (+5 −5)
mmdet3d/core/bbox/samplers/iou_neg_piecewise_sampler.py (+11 −10)
mmdet3d/core/bbox/structures/base_box3d.py (+19 −19)
mmdet3d/core/bbox/structures/cam_box3d.py (+1 −2)
mmdet3d/core/bbox/structures/depth_box3d.py (+3 −3)
mmdet3d/core/bbox/structures/lidar_box3d.py (+3 −3)
mmdet3d/core/bbox/structures/utils.py (+3 −3)
mmdet3d/core/bbox/transforms.py (+4 −4)
mmdet3d/core/evaluation/indoor_eval.py (+14 −11)
mmdet3d/core/evaluation/lyft_eval.py (+4 −3)
mmdet3d/core/post_processing/merge_augs.py (+8 −8)
mmdet3d/core/voxel/voxel_generator.py (+27 −27)
mmdet3d/datasets/custom_3d.py (+17 −17)
mmdet3d/datasets/kitti2d_dataset.py (+3 −3)
mmdet3d/datasets/kitti_dataset.py (+17 −17)
mmdet3d/datasets/lyft_dataset.py (+8 −8)
mmdet3d/core/anchor/anchor_3d_generator.py (view file @ 21cb2aa6)

@@ -20,10 +20,10 @@ class Anchor3DRangeGenerator(object):
             vary for different anchor sizes if size_per_range is True.
         sizes (list[list[float]]): 3D sizes of anchors.
         scales (list[int]): Scales of anchors in different feature levels.
-        rotations (list(float)): Rotations of anchors in a feature grid.
-        custom_values (tuple(float)): Customized values of that anchor. For
+        rotations (list[float]): Rotations of anchors in a feature grid.
+        custom_values (tuple[float]): Customized values of that anchor. For
             example, in nuScenes the anchors have velocities.
-        reshape_out (bool): Whether to reshape the output into (N x 4)
+        reshape_out (bool): Whether to reshape the output into (N x 4).
         size_per_range: Whether to use separate ranges for different sizes.
             If size_per_range is True, the ranges should have the same length
             as the sizes, if not, it will be duplicated.
...
@@ -69,14 +69,14 @@ class Anchor3DRangeGenerator(object):
     @property
     def num_base_anchors(self):
-        """list[int]: total number of base anchors in a feature grid"""
+        """list[int]: Total number of base anchors in a feature grid."""
         num_rot = len(self.rotations)
         num_size = torch.tensor(self.sizes).reshape(-1, 3).size(0)
         return num_rot * num_size

     @property
     def num_levels(self):
-        """int: number of feature levels that the generator will be applied"""
+        """int: Number of feature levels that the generator is applied."""
         return len(self.scales)

     def grid_anchors(self, featmap_sizes, device='cuda'):
...
@@ -168,7 +168,7 @@ class Anchor3DRangeGenerator(object):
             device (str): Devices that the anchors will be put on.

         Returns:
-            torch.Tensor: anchors with shape \
+            torch.Tensor: Anchors with shape \
                 [*feature_size, num_sizes, num_rots, 7].
         """
         if len(feature_size) == 2:
...
@@ -226,7 +226,7 @@ class AlignedAnchor3DRangeGenerator(Anchor3DRangeGenerator):
     according to the feature map sizes.
     However, this makes the anchors center does not match the feature grid.
     The AlignedAnchor3DRangeGenerator add + 1 when using the feature map sizes
-    to obtain the corners of the voxel grid. Then it shift the coordinates to
+    to obtain the corners of the voxel grid. Then it shifts the coordinates to
     the center of voxel grid of use the left up corner to distribute anchors.

     Args:
...
@@ -263,7 +263,7 @@ class AlignedAnchor3DRangeGenerator(Anchor3DRangeGenerator):
             device (str): Devices that the anchors will be put on.

         Returns:
-            torch.Tensor: anchors with shape \
+            torch.Tensor: Anchors with shape \
                 [*feature_size, num_sizes, num_rots, 7].
         """
         if len(feature_size) == 2:
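The refined docstrings above spell out the generator's argument types and leave the `num_base_anchors` computation visible. Below is a minimal usage sketch based only on those documented arguments; the import path, the KITTI-car-like range values and the exact defaults are assumptions for illustration, not part of this commit.

```python
# Minimal sketch of Anchor3DRangeGenerator usage, assuming the constructor
# accepts the arguments listed in the docstring above. Values are illustrative.
import torch
from mmdet3d.core.anchor import Anchor3DRangeGenerator  # assumed import path

generator = Anchor3DRangeGenerator(
    ranges=[[0, -39.68, -1.78, 69.12, 39.68, -1.78]],  # (x/y/z min, x/y/z max)
    sizes=[[1.6, 3.9, 1.56]],                          # one anchor size, x/y/z order
    rotations=[0, 1.5707963])                          # two yaw angles

# num_base_anchors = len(rotations) * number of sizes, as shown in the hunk: 2 * 1 = 2
print(generator.num_base_anchors)

# Anchors for one (H, W) = (248, 216) feature level; with reshape_out the result
# is an (N, 7) tensor of (x, y, z, sizes, yaw) boxes per the refined docstring.
anchors = generator.grid_anchors([(248, 216)], device='cpu')
print(anchors[0].shape)
```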
mmdet3d/core/bbox/box_np_ops.py (view file @ 21cb2aa6)

@@ -58,8 +58,9 @@ def rotation_2d(points, angles):
     """Rotation 2d points based on origin point clockwise when angle positive.

     Args:
-        points (np.ndarray, shape=[N, point_size, 2]): points to be rotated.
-        angles (np.ndarray, shape=[N]): rotation angle.
+        points (np.ndarray): Points to be rotated with shape \
+            (N, point_size, 2).
+        angles (np.ndarray): Rotation angle with shape (N).

     Returns:
         np.ndarray: Same shape as points.
...
@@ -75,12 +76,12 @@ def center_to_corner_box2d(centers, dims, angles=None, origin=0.5):
     format: center(xy), dims(xy), angles(clockwise when positive)

     Args:
-        centers (np.ndarray, shape=[N, 2]): locations in kitti label file.
-        dims (np.ndarray, shape=[N, 2]): dimensions in kitti label file.
-        angles (np.ndarray, shape=[N]): rotation_y in kitti label file.
+        centers (np.ndarray): Locations in kitti label file with shape (N, 2).
+        dims (np.ndarray): Dimensions in kitti label file with shape (N, 2).
+        angles (np.ndarray): Rotation_y in kitti label file with shape (N).

     Returns:
-        np.ndarray: Corners with the shape of [N, 4, 2].
+        np.ndarray: Corners with the shape of (N, 4, 2).
     """
     # 'length' in kitti format is in x axis.
     # xyz(hwl)(kitti label file)<->xyz(lhw)(camera)<->z(-x)(-y)(wlh)(lidar)
...
@@ -146,15 +147,15 @@ def center_to_corner_box3d(centers,
     """Convert kitti locations, dimensions and angles to corners.

     Args:
-        centers (np.ndarray, shape=[N, 3]): Locations in kitti label file.
-        dims (np.ndarray, shape=[N, 3]): Dimensions in kitti label file.
-        angles (np.ndarray, shape=[N]): Rotation_y in kitti label file.
+        centers (np.ndarray): Locations in kitti label file with shape (N, 3).
+        dims (np.ndarray): Dimensions in kitti label file with shape (N, 3).
+        angles (np.ndarray): Rotation_y in kitti label file with shape (N).
         origin (list or array or float): Origin point relate to smallest point.
             use (0.5, 1.0, 0.5) in camera and (0.5, 0.5, 0) in lidar.
         axis (int): Rotation axis. 1 for camera and 2 for lidar.

     Returns:
-        np.ndarray: Corners with the shape of [N, 8, 3].
+        np.ndarray: Corners with the shape of (N, 8, 3).
     """
     # 'length' in kitti format is in x axis.
     # yzx(hwl)(kitti label file)<->xyz(lhw)(camera)<->z(-x)(-y)(wlh)(lidar)
...
@@ -209,11 +210,10 @@ def corner_to_surfaces_3d_jit(corners):
     normal vectors all direct to internal.

     Args:
-        corners (np.ndarray, [N, 8, 3]): 3d box corners
-            with the shape of [N, 8, 3].
+        corners (np.ndarray): 3d box corners with the shape of (N, 8, 3).

     Returns:
-        np.ndarray: Surfaces with the shape of [N, 6, 4, 3].
+        np.ndarray: Surfaces with the shape of (N, 6, 4, 3).
     """
     # box_corners: [N, 8, 3], must from corner functions in this module
     num_boxes = corners.shape[0]
...
@@ -275,10 +275,10 @@ def corner_to_surfaces_3d(corners):
     normal vectors all direct to internal.

     Args:
-        corners (np.ndarray, [N, 8, 3]): 3d box corners.
+        corners (np.ndarray): 3D box corners with shape of (N, 8, 3).

     Returns:
-        np.ndarray: Surfaces with the shape of [N, 6, 4, 3].
+        np.ndarray: Surfaces with the shape of (N, 6, 4, 3).
     """
     # box_corners: [N, 8, 3], must from corner functions in this module
     surfaces = np.array([
...
@@ -320,11 +320,20 @@ def create_anchors_3d_range(feature_size,
                             dtype=np.float32):
     """
     Args:
-        feature_size: list [D, H, W](zyx)
-        sizes: [N, 3] list of list or array, size of anchors, xyz
+        feature_size (list[float] | tuple[float]): Feature map size. It is
+            either a list of a tuple of [D, H, W](in order of z, y, and x).
+        anchor_range (torch.Tensor | list[float]): Range of anchors with
+            shape [6]. The order is consistent with that of anchors, i.e.,
+            (x_min, y_min, z_min, x_max, y_max, z_max).
+        sizes (list[list] | np.ndarray | torch.Tensor): Anchor size with
+            shape [N, 3], in order of x, y, z.
+        rotations (list[float] | np.ndarray | torch.Tensor): Rotations of
+            anchors in a single feature grid.
+        dtype (type): Data type. Default to np.float32.

     Returns:
-        np.ndarray: [*feature_size, num_sizes, num_rots, 7].
+        np.ndarray: Range based anchors with shape of \
+            (*feature_size, num_sizes, num_rots, 7).
     """
     anchor_range = np.array(anchor_range, dtype)
     z_centers = np.linspace(
...
@@ -366,11 +375,12 @@ def rbbox2d_to_near_bbox(rbboxes):
     """convert rotated bbox to nearest 'standing' or 'lying' bbox.

     Args:
-        rbboxes (np.ndarray): [N, 5(x, y, xdim, ydim, rad)] rotated bboxes.
+        rbboxes (np.ndarray): Rotated bboxes with shape of \
+            (N, 5(x, y, xdim, ydim, rad)).

     Returns:
-        np.ndarray: Bboxes with the shpae of [N, 4(xmin, ymin, xmax, ymax)].
+        np.ndarray: Bounding boxes with the shpae of \
+            (N, 4(xmin, ymin, xmax, ymax)).
     """
     rots = rbboxes[..., -1]
     rots_0_pi_div_2 = np.abs(limit_period(rots, 0.5, np.pi))
...
@@ -382,12 +392,12 @@ def rbbox2d_to_near_bbox(rbboxes):
 @numba.jit(nopython=True)
 def iou_jit(boxes, query_boxes, mode='iou', eps=0.0):
-    """Calculate box iou. note that jit version runs ~10x faster than the
+    """Calculate box iou. Note that jit version runs ~10x faster than the
     box_overlaps function in mmdet3d.core.evaluation.

     Args:
-        boxes (np.ndarray): (N, 4) ndarray of float
-        query_boxes (np.ndarray): (K, 4) ndarray of float
+        boxes (np.ndarray): Input bounding boxes with shape of (N, 4).
+        query_boxes (np.ndarray): Query boxes with shape of (K, 4).

     Returns:
         np.ndarray: Overlap between boxes and query_boxes
...
@@ -515,13 +525,13 @@ def points_in_convex_polygon_3d_jit(points,
     """Check points is in 3d convex polygons.

     Args:
-        points (np.ndarray): [num_points, 3] array.
-        polygon_surfaces (np.ndarray): [num_polygon, max_num_surfaces,
-            max_num_points_of_surface, 3] array. all surfaces' normal vector
-            must direct to internal. max_num_points_of_surface must at least 3.
-        num_surfaces (np.ndarray): [num_polygon] array.
-            indicate how many surfaces a polygon contain
+        points (np.ndarray): Input points with shape of (num_points, 3).
+        polygon_surfaces (np.ndarray): Polygon surfaces with shape of \
+            (num_polygon, max_num_surfaces, max_num_points_of_surface, 3). \
+            All surfaces' normal vector must direct to internal. \
+            Max_num_points_of_surface must at least 3.
+        num_surfaces (np.ndarray): Number of surfaces a polygon contains \
+            shape of (num_polygon).

     Returns:
         np.ndarray: Result matrix with the shape of [num_points, num_polygon].
...
@@ -595,9 +605,10 @@ def boxes3d_to_corners3d_lidar(boxes3d, bottom_center=True):
         2 -------- 1

     Args:
-        boxes3d (np.ndarray): (N, 7) [x, y, z, w, l, h, ry] in LiDAR coords,
-            see the definition of ry in KITTI dataset
-        bottom_center (bool): whether z is on the bottom center of object.
+        boxes3d (np.ndarray): Boxes with shape of (N, 7) \
+            [x, y, z, w, l, h, ry] in LiDAR coords, see the definition of ry \
+            in KITTI dataset.
+        bottom_center (bool): Whether z is on the bottom center of object.

     Returns:
         np.ndarray: Box corners with the shape of [N, 8, 3].
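The `iou_jit` hunk above documents the shape contract: boxes of shape (N, 4), query boxes of shape (K, 4), and an (N, K) overlap matrix. A plain NumPy reference that follows that documented contract is sketched below; it is not the numba kernel from this file, just an illustration of the shapes.

```python
# NumPy reference for the documented (N, 4) x (K, 4) -> (N, K) IoU contract.
# Boxes are (x1, y1, x2, y2); eps mirrors the kernel's optional padding term.
import numpy as np

def iou_numpy(boxes, query_boxes, eps=0.0):
    x1 = np.maximum(boxes[:, None, 0], query_boxes[None, :, 0])
    y1 = np.maximum(boxes[:, None, 1], query_boxes[None, :, 1])
    x2 = np.minimum(boxes[:, None, 2], query_boxes[None, :, 2])
    y2 = np.minimum(boxes[:, None, 3], query_boxes[None, :, 3])
    inter = np.clip(x2 - x1 + eps, 0, None) * np.clip(y2 - y1 + eps, 0, None)
    area1 = (boxes[:, 2] - boxes[:, 0] + eps) * (boxes[:, 3] - boxes[:, 1] + eps)
    area2 = (query_boxes[:, 2] - query_boxes[:, 0] + eps) * \
            (query_boxes[:, 3] - query_boxes[:, 1] + eps)
    return inter / (area1[:, None] + area2[None, :] - inter)

boxes = np.array([[0., 0., 2., 2.]])
queries = np.array([[1., 1., 3., 3.], [0., 0., 2., 2.]])
print(iou_numpy(boxes, queries))  # (1, 2) matrix: [[~0.143, 1.0]]
```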
mmdet3d/core/bbox/coders/delta_xyzwhlr_bbox_coder.py (view file @ 21cb2aa6)

@@ -28,7 +28,7 @@ class DeltaXYZWLHRBBoxCoder(BaseBBoxCoder):
             ground-truth boxes.

         Returns:
-            torch.Tensor: Box transformation deltas
+            torch.Tensor: Box transformation deltas.
         """
         box_ndim = src_boxes.shape[-1]
         cas, cgs, cts = [], [], []
mmdet3d/core/bbox/coders/partial_bin_based_bbox_coder.py (view file @ 21cb2aa6)

@@ -28,8 +28,9 @@ class PartialBinBasedBBoxCoder(BaseBBoxCoder):
         """Encode ground truth to prediction targets.

         Args:
-            gt_bboxes_3d (BaseInstance3DBoxes): gt bboxes with shape (n, 7).
-            gt_labels_3d (torch.Tensor): gt classes.
+            gt_bboxes_3d (BaseInstance3DBoxes): Ground truth bboxes \
+                with shape (n, 7).
+            gt_labels_3d (torch.Tensor): Ground truth classes.

         Returns:
             tuple: Targets of center, size and direction.
...
@@ -58,7 +59,8 @@ class PartialBinBasedBBoxCoder(BaseBBoxCoder):
         """Decode predicted parts to bbox3d.

         Args:
-            bbox_out (dict): predictions from model, should contain keys below
+            bbox_out (dict): Predictions from model, should contain keys below.
+
                 - center: predicted bottom center of bboxes.
                 - dir_class: predicted bbox direction class.
                 - dir_res: predicted bbox direction residual.
...
@@ -66,7 +68,7 @@ class PartialBinBasedBBoxCoder(BaseBBoxCoder):
                 - size_res: predicted bbox size residual.

         Returns:
-            torch.Tensor: decoded bbox3d with shape (batch, n, 7)
+            torch.Tensor: Decoded bbox3d with shape (batch, n, 7).
         """
         center = bbox_out['center']
         batch_size, num_proposal = center.shape[:2]
...
@@ -98,11 +100,11 @@ class PartialBinBasedBBoxCoder(BaseBBoxCoder):
         """Split predicted features to specific parts.

         Args:
-            preds (torch.Tensor): predicted features to split.
-            base_xyz (torch.Tensor): coordinates of points.
+            preds (torch.Tensor): Predicted features to split.
+            base_xyz (torch.Tensor): Coordinates of points.

         Returns:
-            dict[str, torch.Tensor]: split results.
+            dict[str, torch.Tensor]: Split results.
         """
         results = {}
         start, end = 0, 0
...
@@ -183,7 +185,7 @@ class PartialBinBasedBBoxCoder(BaseBBoxCoder):
             limit_period (bool): Whether to limit angle to [-pi, pi].

         Returns:
-            torch.Tensor: angle decoded from angle_cls and angle_res.
+            torch.Tensor: Angle decoded from angle_cls and angle_res.
         """
         angle_per_class = 2 * np.pi / float(self.num_dir_bins)
         angle_center = angle_cls.float() * angle_per_class
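The last hunk leaves the angle-decoding step visible: each of `num_dir_bins` direction bins spans 2*pi/num_dir_bins radians, and the decoded angle is the bin center plus the predicted residual. A tiny standalone sketch of that formula follows; the scalar inputs and default bin count are illustrative assumptions, not values from the coder.

```python
# Sketch of the bin-based angle decoding shown in the hunk above.
import numpy as np

def decode_angle(angle_cls, angle_res, num_dir_bins=12):
    angle_per_class = 2 * np.pi / float(num_dir_bins)  # width of one bin
    angle_center = angle_cls * angle_per_class         # bin center
    return angle_center + angle_res                    # add residual

print(decode_angle(angle_cls=3, angle_res=0.1))  # bin 3 of 12 -> ~1.671 rad
```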
mmdet3d/core/bbox/iou_calculators/iou3d_calculator.py (view file @ 21cb2aa6)

@@ -12,7 +12,7 @@ class BboxOverlapsNearest3D(object):
     (BEV), and then calculate the 2D IoU using :meth:`bbox_overlaps`.

     Args:
-        coordinate (str): 'camera', 'lidar', or 'depth' coordinate system
+        coordinate (str): 'camera', 'lidar', or 'depth' coordinate system.
     """

     def __init__(self, coordinate='lidar'):
...
@@ -32,7 +32,7 @@ class BboxOverlapsNearest3D(object):
             bboxes2 (torch.Tensor): shape (M, 7+N) [x, y, z, h, w, l, ry, v].
             mode (str): "iou" (intersection over union) or iof
                 (intersection over foreground).
-            is_aligned (bool): Whether the calculation is aligned
+            is_aligned (bool): Whether the calculation is aligned.

         Return:
             torch.Tensor: If ``is_aligned`` is ``True``, return ious between \
...
@@ -43,7 +43,7 @@ class BboxOverlapsNearest3D(object):
                                          self.coordinate)

     def __repr__(self):
-        """str: return a string that describes the module"""
+        """str: Return a string that describes the module."""
         repr_str = self.__class__.__name__
         repr_str += f'(coordinate={self.coordinate}'
         return repr_str
...
@@ -98,8 +98,8 @@ def bbox_overlaps_nearest_3d(bboxes1,
     Note:
         This function first finds the nearest 2D boxes in bird eye view
-        (BEV), and then calculate the 2D IoU using ``:meth:bbox_overlaps``.
-        Ths IoU calculator ``:class:BboxOverlapsNearest3D`` uses this
+        (BEV), and then calculates the 2D IoU using :meth:`bbox_overlaps`.
+        Ths IoU calculator :class:`BboxOverlapsNearest3D` uses this
         function to calculate IoUs of boxes.

         If ``is_aligned`` is ``False``, then it calculates the ious between
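For orientation, here is a hedged call sketch for the nearest-BEV overlap function whose docstring is refined above. The import path and argument defaults are assumptions inferred from the docstring, and the box values are illustrative; the layout follows the documented (x, y, z, h, w, l, ry) convention.

```python
# Hedged usage sketch for nearest-BEV 3D IoU; import path is an assumption.
import torch
from mmdet3d.core.bbox import bbox_overlaps_nearest_3d

bboxes1 = torch.tensor([[0.0, 0.0, -1.0, 1.56, 1.6, 3.9, 0.0]])
bboxes2 = torch.tensor([[0.5, 0.0, -1.0, 1.56, 1.6, 3.9, 0.0],
                        [10., 10., -1.0, 1.56, 1.6, 3.9, 1.57]])

# Pairwise (is_aligned=False) IoUs between every box in bboxes1 and bboxes2.
ious = bbox_overlaps_nearest_3d(bboxes1, bboxes2, mode='iou', coordinate='lidar')
print(ious.shape)  # (1, 2): one row per box in bboxes1
```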
mmdet3d/core/bbox/samplers/iou_neg_piecewise_sampler.py (view file @ 21cb2aa6)

@@ -14,15 +14,15 @@ class IoUNegPiecewiseSampler(RandomSampler):
     by `neg_piece_fractions`.

     Args:
-        num (int): number of proposals.
-        pos_fraction (float): the fraction of positive proposals.
-        neg_piece_fractions (list): a list contains fractions that indicates
+        num (int): Number of proposals.
+        pos_fraction (float): The fraction of positive proposals.
+        neg_piece_fractions (list): A list contains fractions that indicates
             the ratio of each piece of total negtive samplers.
-        neg_iou_piece_thrs (list): a list contains IoU thresholds that
+        neg_iou_piece_thrs (list): A list contains IoU thresholds that
             indicate the upper bound of this piece.
-        neg_pos_ub (float): the total ratio to limit the upper bound
-            number of negtive samples
-        add_gt_as_proposals (bool): whether to add gt as proposals.
+        neg_pos_ub (float): The total ratio to limit the upper bound
+            number of negtive samples.
+        add_gt_as_proposals (bool): Whether to add gt as proposals.
     """

     def __init__(self,
...
@@ -107,9 +107,10 @@ class IoUNegPiecewiseSampler(RandomSampler):
         Args:
             assign_result (:obj:`AssignResult`): Bbox assigning results.
-            bboxes (Tensor): Boxes to be sampled from.
-            gt_bboxes (Tensor): Ground truth bboxes.
-            gt_labels (Tensor, optional): Class labels of ground truth bboxes.
+            bboxes (torch.Tensor): Boxes to be sampled from.
+            gt_bboxes (torch.Tensor): Ground truth bboxes.
+            gt_labels (torch.Tensor, optional): Class labels of ground truth \
+                bboxes.

         Returns:
             :obj:`SamplingResult`: Sampling result.
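The sampler's arguments are now documented with consistent capitalization. For context, an illustrative config fragment using those argument names is shown below; the numeric values are made up for illustration and are not copied from any shipped config.

```python
# Illustrative training-sampler config using the argument names documented above.
sampler_cfg = dict(
    type='IoUNegPiecewiseSampler',
    num=128,                         # number of proposals to sample
    pos_fraction=0.5,                # fraction of positive proposals
    neg_piece_fractions=[0.8, 0.2],  # split of negatives into IoU pieces
    neg_iou_piece_thrs=[0.55, 0.1],  # upper IoU bound of each piece
    neg_pos_ub=-1,                   # no upper bound on the neg/pos ratio
    add_gt_as_proposals=False)
```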
mmdet3d/core/bbox/structures/base_box3d.py (view file @ 21cb2aa6)

@@ -15,7 +15,7 @@ class BaseInstance3DBoxes(object):
     Args:
         tensor (torch.Tensor | np.ndarray | list): a N x box_dim matrix.
-        box_dim (int): number of the dimension of a box
+        box_dim (int): Number of the dimension of a box
             Each row is (x, y, z, x_size, y_size, z_size, yaw).
             Default to 7.
         with_yaw (bool): Whether the box is with yaw rotation.
...
@@ -59,10 +59,10 @@ class BaseInstance3DBoxes(object):
     @property
     def volume(self):
-        """Computes the volume of all the boxes.
+        """Compute the volume of all the boxes.

         Returns:
-            torch.Tensor: a vector with volume of each box.
+            torch.Tensor: A vector with volume of each box.
         """
         return self.tensor[:, 3] * self.tensor[:, 4] * self.tensor[:, 5]
...
@@ -70,10 +70,10 @@ class BaseInstance3DBoxes(object):
     def dims(self):
         """Calculate the length in each dimension of all the boxes.

-        Convert the boxes to the form of (x_size, y_size, z_size)
+        Convert the boxes to the form of (x_size, y_size, z_size).

         Returns:
-            torch.Tensor: corners of each box with size (N, 8, 3)
+            torch.Tensor: Corners of each box with size (N, 8, 3).
         """
         return self.tensor[:, 3:6]
...
@@ -82,7 +82,7 @@ class BaseInstance3DBoxes(object):
         """Obtain the rotation of all the boxes.

         Returns:
-            torch.Tensor: a vector with yaw of each box.
+            torch.Tensor: A vector with yaw of each box.
         """
         return self.tensor[:, 6]
...
@@ -91,7 +91,7 @@ class BaseInstance3DBoxes(object):
         """Obtain the height of all the boxes.

         Returns:
-            torch.Tensor: a vector with height of each box.
+            torch.Tensor: A vector with height of each box.
         """
         return self.tensor[:, 5]
...
@@ -100,7 +100,7 @@ class BaseInstance3DBoxes(object):
         """Obtain the top height of all the boxes.

         Returns:
-            torch.Tensor: a vector with the top height of each box.
+            torch.Tensor: A vector with the top height of each box.
         """
         return self.bottom_height + self.height
...
@@ -109,7 +109,7 @@ class BaseInstance3DBoxes(object):
         """Obtain the bottom's height of all the boxes.

         Returns:
-            torch.Tensor: a vector with bottom's height of each box.
+            torch.Tensor: A vector with bottom's height of each box.
         """
         return self.tensor[:, 2]
...
@@ -128,7 +128,7 @@ class BaseInstance3DBoxes(object):
             for more clear usage.

         Returns:
-            torch.Tensor: a tensor with center of each box.
+            torch.Tensor: A tensor with center of each box.
         """
         return self.bottom_center
...
@@ -137,7 +137,7 @@ class BaseInstance3DBoxes(object):
         """Calculate the bottom center of all the boxes.

         Returns:
-            torch.Tensor: a tensor with center of each box.
+            torch.Tensor: A tensor with center of each box.
         """
         return self.tensor[:, :3]
...
@@ -146,7 +146,7 @@ class BaseInstance3DBoxes(object):
         """Calculate the gravity center of all the boxes.

         Returns:
-            torch.Tensor: a tensor with center of each box.
+            torch.Tensor: A tensor with center of each box.
         """
         pass
...
@@ -164,8 +164,8 @@ class BaseInstance3DBoxes(object):
         """Calculate whether the points is in any of the boxes.

         Args:
-            angles (float): rotation angles
-            axis (int): the axis to rotate the boxes
+            angles (float): Rotation angles
+            axis (int): The axis to rotate the boxes
         """
         pass
...
@@ -178,7 +178,7 @@ class BaseInstance3DBoxes(object):
         """Calculate whether the points is in any of the boxes.

         Args:
-            trans_vector (torch.Tensor): translation vector of size 1x3
+            trans_vector (torch.Tensor): Translation vector of size 1x3.
         """
         if not isinstance(trans_vector, torch.Tensor):
             trans_vector = self.tensor.new_tensor(trans_vector)
...
@@ -194,7 +194,7 @@ class BaseInstance3DBoxes(object):
         Note:
             In the original implementation of SECOND, checking whether
             a box in the range checks whether the points are in a convex
-            polygon, we try to reduce the burdun for simpler cases.
+            polygon, we try to reduce the burden for simpler cases.

         Returns:
             torch.Tensor: A binary vector indicating whether each box is \
...
@@ -227,7 +227,7 @@ class BaseInstance3DBoxes(object):
         """Convert self to `dst` mode.

         Args:
-            dst (:obj:`BoxMode`): the target Box mode
+            dst (:obj:`BoxMode`): The target Box mode。
             rt_mat (np.ndarray | torch.Tensor): The rotation and translation
                 matrix between different coordinates. Defaults to None.
                 The conversion from `src` coordinates to `dst` coordinates
...
@@ -308,11 +308,11 @@ class BaseInstance3DBoxes(object):
         return original_type(b, box_dim=self.box_dim, with_yaw=self.with_yaw)

     def __len__(self):
-        """int: Number of boxes in the current object"""
+        """int: Number of boxes in the current object."""
         return self.tensor.shape[0]

     def __repr__(self):
-        """str: Return a strings that describes the object"""
+        """str: Return a strings that describes the object."""
         return self.__class__.__name__ + '(\n' + str(self.tensor) + ')'

     @classmethod
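Most of the changes in this file capitalize the one-line property docstrings while leaving the property implementations visible. Below is a short sketch exercising those properties through the LiDAR subclass shown elsewhere in this commit; the import path is assumed and the box values are illustrative.

```python
# Sketch of the BaseInstance3DBoxes properties refined above, via the LiDAR
# subclass. Each row is (x, y, z, x_size, y_size, z_size, yaw), box_dim=7.
import torch
from mmdet3d.core.bbox import LiDARInstance3DBoxes  # assumed import path

boxes = LiDARInstance3DBoxes(
    torch.tensor([[5.0, 1.0, -1.5, 1.6, 3.9, 1.56, 0.3]]))

print(len(boxes))           # number of boxes (__len__)
print(boxes.volume)         # x_size * y_size * z_size, per the hunk above
print(boxes.dims)           # (N, 3) sizes
print(boxes.yaw)            # (N,) rotation
print(boxes.bottom_height)  # z of the bottom face (tensor[:, 2])
print(boxes.top_height)     # bottom_height + height
```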
mmdet3d/core/bbox/structures/cam_box3d.py (view file @ 21cb2aa6)

@@ -228,8 +228,7 @@ class CameraInstance3DBoxes(BaseInstance3DBoxes):
         Note:
             In the original implementation of SECOND, checking whether
             a box in the range checks whether the points are in a convex
-            polygon, we try to reduce the burdun for simpler cases.
-            TODO: check whether this will effect the performance
+            polygon, we try to reduce the burden for simpler cases.

         Returns:
             torch.Tensor: Indicating whether each box is inside
mmdet3d/core/bbox/structures/depth_box3d.py (view file @ 21cb2aa6)

@@ -37,7 +37,7 @@ class DepthInstance3DBoxes(BaseInstance3DBoxes):
         """Calculate the gravity center of all the boxes.

         Returns:
-            torch.Tensor: a tensor with center of each box.
+            torch.Tensor: A tensor with center of each box.
         """
         bottom_center = self.bottom_center
         gravity_center = torch.zeros_like(bottom_center)
...
@@ -68,7 +68,7 @@ class DepthInstance3DBoxes(BaseInstance3DBoxes):
                 (x1, y0, z0)

         Returns:
-            torch.Tensor: corners of each box with size (N, 8, 3)
+            torch.Tensor: Corners of each box with size (N, 8, 3).
         """
         # TODO: rotation_3d_in_axis function do not support
         # empty tensor currently.
...
@@ -103,7 +103,7 @@ class DepthInstance3DBoxes(BaseInstance3DBoxes):
         """Calculate the 2D bounding boxes in BEV without rotation.

         Returns:
-            torch.Tensor: a tensor of 2D BEV box of each box.
+            torch.Tensor: A tensor of 2D BEV box of each box.
         """
         # Obtain BEV boxes with rotation in XYWHR format
         bev_rotated_boxes = self.bev
mmdet3d/core/bbox/structures/lidar_box3d.py (view file @ 21cb2aa6)

@@ -68,7 +68,7 @@ class LiDARInstance3DBoxes(BaseInstance3DBoxes):
                 (x0, y0, z0)

         Returns:
-            torch.Tensor: corners of each box with size (N, 8, 3)
+            torch.Tensor: Corners of each box with size (N, 8, 3).
         """
         # TODO: rotation_3d_in_axis function do not support
         # empty tensor currently.
...
@@ -93,8 +93,8 @@ class LiDARInstance3DBoxes(BaseInstance3DBoxes):
         """Calculate the 2D bounding boxes in BEV with rotation.

         Returns:
-            torch.Tensor: A nx5 tensor of 2D BEV box of each box.
-                The box is in XYWHR format
+            torch.Tensor: A nx5 tensor of 2D BEV box of each box. \
+                The box is in XYWHR format.
         """
         return self.tensor[:, [0, 1, 3, 4, 6]]
mmdet3d/core/bbox/structures/utils.py (view file @ 21cb2aa6)

@@ -7,12 +7,12 @@ def limit_period(val, offset=0.5, period=np.pi):
     Args:
         val (torch.Tensor): The value to be converted.
-        offset (float, optional): Offset to set the value range.
+        offset (float, optional): Offset to set the value range. \
             Defaults to 0.5.
         period ([type], optional): Period of the value. Defaults to np.pi.

     Returns:
-        torch.Tensor: value in the range of \
+        torch.Tensor: Value in the range of \
             [-offset * period, (1-offset) * period]
     """
     return val - torch.floor(val / period + offset) * period
...
@@ -90,7 +90,7 @@ def get_box_type(box_type):
         The valid value are "LiDAR", "Camera", or "Depth".

     Returns:
-        tuple: box type and box mode.
+        tuple: Box type and box mode.
     """
     from .box_3d_mode import (Box3DMode, CameraInstance3DBoxes,
                               DepthInstance3DBoxes, LiDARInstance3DBoxes)
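The `limit_period` hunk leaves the one-line implementation visible, so its documented range can be checked directly. Below is a NumPy transcription of that formula with a few sample values; it mirrors the line shown in the diff but is not the library function itself.

```python
# limit_period wraps values into [-offset*period, (1-offset)*period);
# with the defaults (offset=0.5, period=pi) that is [-pi/2, pi/2).
import numpy as np

def limit_period(val, offset=0.5, period=np.pi):
    return val - np.floor(val / period + offset) * period

print(limit_period(np.array([0.2, 2.0, -2.0, 3.3])))
# 0.2 stays; 2.0 -> 2.0 - pi ~= -1.142; -2.0 -> -2.0 + pi ~= 1.142; 3.3 -> ~0.158
```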
mmdet3d/core/bbox/transforms.py (view file @ 21cb2aa6)

@@ -27,7 +27,7 @@ def bbox3d2roi(bbox_list):
     """Convert a list of bboxes to roi format.

     Args:
-        bbox_list (list[torch.Tensor]): a list of bboxes
+        bbox_list (list[torch.Tensor]): A list of bboxes
             corresponding to a batch of images.

     Returns:
...
@@ -49,9 +49,9 @@ def bbox3d2result(bboxes, scores, labels):
     """Convert detection results to a list of numpy arrays.

     Args:
-        bboxes (torch.Tensor): shape (n, 5)
-        labels (torch.Tensor): shape (n, )
-        scores (torch.Tensor): shape (n, )
+        bboxes (torch.Tensor): Bounding boxes with shape of (n, 5).
+        labels (torch.Tensor): Labels with shape of (n, ).
+        scores (torch.Tensor): Scores with shape of (n, ).

     Returns:
         dict[str, torch.Tensor]: Bbox results in cpu mode.
mmdet3d/core/evaluation/indoor_eval.py (view file @ 21cb2aa6)

@@ -8,14 +8,16 @@ def average_precision(recalls, precisions, mode='area'):
     """Calculate average precision (for single or multiple scales).

     Args:
-        recalls (np.ndarray): shape (num_scales, num_dets) or (num_dets, )
-        precisions (np.ndarray): shape (num_scales, num_dets) or (num_dets, )
+        recalls (np.ndarray): Recalls with shape of (num_scales, num_dets) \
+            or (num_dets, ).
+        precisions (np.ndarray): Precisions with shape of \
+            (num_scales, num_dets) or (num_dets, ).
         mode (str): 'area' or '11points', 'area' means calculating the area
             under precision-recall curve, '11points' means calculating
             the average precision of recalls at [0, 0.1, ..., 1]

     Returns:
-        float or np.ndarray: calculated average precision
+        float or np.ndarray: Calculated average precision.
     """
     if recalls.ndim == 1:
         recalls = recalls[np.newaxis, :]
...
@@ -55,14 +57,14 @@ def eval_det_cls(pred, gt, iou_thr=None):
     single class.

     Args:
-        pred (dict): {img_id: [(bbox, score)]} where bbox is numpy array.
-        gt (dict): {img_id: [bbox]}.
-        iou_thr (list[float]): a list, iou threshold.
+        pred (dict): Predictions mapping from image id to bounding boxes \
+            and scores.
+        gt (dict): Ground truths mapping from image id to bounding boxes.
+        iou_thr (list[float]): A list of iou thresholds.

     Return:
-        np.ndarray: numpy array of length nd.
-        np.ndarray: numpy array of length nd.
-        float: scalar, average precision.
+        tuple (np.ndarray, np.ndarray, float): Recalls, precisions and \
+            average precision.
     """
     # {img_id: {'bbox': box structure, 'det': matched list}}
...
@@ -167,7 +169,8 @@ def eval_map_recall(pred, gt, ovthresh=None):
     Args:
         pred (dict): Information of detection results,
             which maps class_id and predictions.
-        gt (dict): information of gt results, which maps class_id and gt.
+        gt (dict): Information of ground truths, which maps class_id and \
+            ground truths.
         ovthresh (list[float]): iou threshold.
             Default: None.
...
@@ -213,7 +216,7 @@ def indoor_eval(gt_annos,
         dt_annos (list[dict]): Detection annotations. the dict
             includes the following keys

             - labels_3d (torch.Tensor): Labels of boxes.
-            - boxes_3d (BaseInstance3DBoxes): 3d bboxes in Depth coordinate.
+            - boxes_3d (BaseInstance3DBoxes): 3D bboxes in Depth coordinate.
             - scores_3d (torch.Tensor): Scores of boxes.
         metric (list[float]): AP IoU thresholds.
         label2cat (dict): {label: cat}.
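The `average_precision` docstring describes two modes; '11points' averages the best attainable precision at recalls 0, 0.1, ..., 1. A compact sketch of that mode as documented is shown below; it mirrors the described behaviour, not necessarily the exact implementation in this file.

```python
# 11-point interpolated AP as described in the docstring above.
import numpy as np

def average_precision_11points(recalls, precisions):
    ap = 0.0
    for thr in np.linspace(0, 1, 11):          # recall levels 0.0, 0.1, ..., 1.0
        mask = recalls >= thr
        prec = precisions[mask].max() if mask.any() else 0.0
        ap += prec / 11.0
    return ap

recalls = np.array([0.1, 0.4, 0.4, 0.8, 1.0])
precisions = np.array([1.0, 0.8, 0.6, 0.5, 0.4])
print(average_precision_11points(recalls, precisions))  # ~0.6545
```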
mmdet3d/core/evaluation/lyft_eval.py (view file @ 21cb2aa6)

@@ -142,6 +142,7 @@ def get_classwise_aps(gt, predictions, class_names, iou_thresholds):
     """Returns an array with an average precision per class.

     Note: Ground truth and predictions should have the following format.
+
     .. code-block::

         gt = [{
...
@@ -200,13 +201,13 @@ def get_single_class_aps(gt, predictions, iou_thresholds):
     Args:
         gt (list[dict]): list of dictionaries in the format described above.
-        predictions (list[dict]): list of dictionaries in the format
-            described below.
-        iou_thresholds (list[float]): IOU thresholds used to calculate
-            TP / FN
+        predictions (list[dict]): list of dictionaries in the format \
+            described below.
+        iou_thresholds (list[float]): IOU thresholds used to calculate \
+            TP / FN

     Returns:
-        tuple[np.ndarray]: returns (recalls, precisions, average precisions)
+        tuple[np.ndarray]: Returns (recalls, precisions, average precisions)
             for each class.
     """
     num_gts = len(gt)
mmdet3d/core/post_processing/merge_augs.py (view file @ 21cb2aa6)

@@ -10,18 +10,18 @@ def merge_aug_bboxes_3d(aug_results, img_metas, test_cfg):
     Args:
         aug_results (list[dict]): The dict of detection results.
             The dict contains the following keys

-            - boxes_3d (:obj:`BaseInstance3DBoxes`): detection bbox
-            - scores_3d (torch.Tensor): detection scores
-            - labels_3d (torch.Tensor): predicted box labels
-        img_metas (list[dict]): Meta information of each sample
+            - boxes_3d (:obj:`BaseInstance3DBoxes`): Detection bbox.
+            - scores_3d (torch.Tensor): Detection scores.
+            - labels_3d (torch.Tensor): Predicted box labels.
+        img_metas (list[dict]): Meta information of each sample.
         test_cfg (dict): Test config.

     Returns:
-        dict: bbox results in cpu mode, containing the merged results
+        dict: Bounding boxes results in cpu mode, containing merged results.

-            - boxes_3d (:obj:`BaseInstance3DBoxes`): merged detection bbox
-            - scores_3d (torch.Tensor): merged detection scores
-            - labels_3d (torch.Tensor): merged predicted box labels
+            - boxes_3d (:obj:`BaseInstance3DBoxes`): Merged detection bbox.
+            - scores_3d (torch.Tensor): Merged detection scores.
+            - labels_3d (torch.Tensor): Merged predicted box labels.
     """
     assert len(aug_results) == len(img_metas), \
mmdet3d/core/voxel/voxel_generator.py (view file @ 21cb2aa6)

@@ -40,22 +40,22 @@ class VoxelGenerator(object):
     @property
     def voxel_size(self):
-        """list[float]: size of a single voxel"""
+        """list[float]: Size of a single voxel."""
         return self._voxel_size

     @property
     def max_num_points_per_voxel(self):
-        """int: maximum number of points per voxel"""
+        """int: Maximum number of points per voxel."""
        return self._max_num_points

     @property
     def point_cloud_range(self):
-        """list[float]: range of point cloud"""
+        """list[float]: Range of point cloud."""
         return self._point_cloud_range

     @property
     def grid_size(self):
-        """np.ndarray: The size of grids"""
+        """np.ndarray: The size of grids."""
         return self._grid_size
...
@@ -68,18 +68,18 @@ def points_to_voxel(points,
     """convert kitti points(N, >=3) to voxels.

     Args:
-        points (np.ndarray): [N, ndim]. points[:, :3] contain xyz points and
+        points (np.ndarray): [N, ndim]. points[:, :3] contain xyz points and \
             points[:, 3:] contain other information such as reflectivity.
         voxel_size (list, tuple, np.ndarray): [3] xyz, indicate voxel size
-        coors_range (list[float | tuple[float] | ndarray]): Voxel range.
+        coors_range (list[float | tuple[float] | ndarray]): Voxel range. \
             format: xyzxyz, minmax
         max_points (int): Indicate maximum points contained in a voxel.
-        reverse_index (bool): Whether return reversed coordinates.
-            if points has xyz format and reverse_index is True, output
-            coordinates will be zyx format, but points in features always
+        reverse_index (bool): Whether return reversed coordinates. \
+            if points has xyz format and reverse_index is True, output \
+            coordinates will be zyx format, but points in features always \
             xyz format.
-        max_voxels (int): Maximum number of voxels this function create.
-            for second, 20000 is a good choice. Points should be shuffled for
+        max_voxels (int): Maximum number of voxels this function creates. \
+            For second, 20000 is a good choice. Points should be shuffled for \
             randomness before this function because max_voxels drops points.

     Returns:
...
@@ -133,20 +133,20 @@ def _points_to_voxel_reverse_kernel(points,
     """convert kitti points(N, >=3) to voxels.

     Args:
-        points (np.ndarray): [N, ndim]. points[:, :3] contain xyz points and
+        points (np.ndarray): [N, ndim]. points[:, :3] contain xyz points and \
             points[:, 3:] contain other information such as reflectivity.
-        voxel_size (list, tuple, np.ndarray): [3] xyz, indicate voxel size
-        coors_range (list[float | tuple[float] | ndarray]): Range of voxels.
+        voxel_size (list, tuple, np.ndarray): [3] xyz, indicate voxel size \
+        coors_range (list[float | tuple[float] | ndarray]): Range of voxels. \
             format: xyzxyz, minmax
         num_points_per_voxel (int): Number of points per voxel.
-        coor_to_voxel_idx (np.ndarray): A voxel grid of shape (D, H, W), which
-            has the same shape as the complete voxel map. It indicates the
-            index of each corresponding voxel.
+        coor_to_voxel_idx (np.ndarray): A voxel grid of shape (D, H, W), \
+            which has the same shape as the complete voxel map. It indicates \
+            the index of each corresponding voxel.
         voxels (np.ndarray): Created empty voxels.
         coors (np.ndarray): Created coordinates of each voxel.
         max_points (int): Indicate maximum points contained in a voxel.
-        max_voxels (int): Maximum number of voxels this function create.
-            for second, 20000 is a good choice. Points should be shuffled for
+        max_voxels (int): Maximum number of voxels this function create. \
+            for second, 20000 is a good choice. Points should be shuffled for \
             randomness before this function because max_voxels drops points.

     Returns:
...
@@ -207,20 +207,20 @@ def _points_to_voxel_kernel(points,
     """convert kitti points(N, >=3) to voxels.

     Args:
-        points (np.ndarray): [N, ndim]. points[:, :3] contain xyz points and
+        points (np.ndarray): [N, ndim]. points[:, :3] contain xyz points and \
             points[:, 3:] contain other information such as reflectivity.
-        voxel_size (list, tuple, np.ndarray): [3] xyz, indicate voxel size
-        coors_range (list[float | tuple[float] | ndarray]): Range of voxels.
+        voxel_size (list, tuple, np.ndarray): [3] xyz, indicate voxel size.
+        coors_range (list[float | tuple[float] | ndarray]): Range of voxels. \
            format: xyzxyz, minmax
         num_points_per_voxel (int): Number of points per voxel.
-        coor_to_voxel_idx (np.ndarray): A voxel grid of shape (D, H, W), which
-            has the same shape as the complete voxel map. It indicates the
-            index of each corresponding voxel.
+        coor_to_voxel_idx (np.ndarray): A voxel grid of shape (D, H, W), \
+            which has the same shape as the complete voxel map. It indicates \
+            the index of each corresponding voxel.
         voxels (np.ndarray): Created empty voxels.
         coors (np.ndarray): Created coordinates of each voxel.
         max_points (int): Indicate maximum points contained in a voxel.
-        max_voxels (int): Maximum number of voxels this function create.
-            for second, 20000 is a good choice. Points should be shuffled for
+        max_voxels (int): Maximum number of voxels this function create. \
+            for second, 20000 is a good choice. Points should be shuffled for \
             randomness before this function because max_voxels drops points.

     Returns:
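The `points_to_voxel` docstring above lists its inputs: points of shape (N, ndim), an xyz voxel size, an xyzxyz coordinate range, max_points, reverse_index and max_voxels. A hedged call sketch using those arguments follows; the import path and the (voxels, coors, num_points_per_voxel) return tuple are assumptions inferred from the surrounding docstrings.

```python
# Hedged usage sketch for points_to_voxel following the refined docstring.
import numpy as np
from mmdet3d.core.voxel.voxel_generator import points_to_voxel  # assumed path

# Synthetic LiDAR points: x in [0, 70.4), y in [-40, 40), z in [-3, 1), plus intensity.
points = np.random.rand(1000, 4).astype(np.float32)
points[:, :3] = points[:, :3] * [70.4, 80.0, 4.0] + [0.0, -40.0, -3.0]

voxels, coors, num_points = points_to_voxel(
    points,
    voxel_size=[0.05, 0.05, 0.1],          # xyz voxel size
    coors_range=[0, -40, -3, 70.4, 40, 1],  # xyzxyz, minmax
    max_points=35,                          # max points kept per voxel
    reverse_index=True,                     # output coordinates in zyx order
    max_voxels=20000)
print(voxels.shape, coors.shape, num_points.shape)
```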
mmdet3d/datasets/custom_3d.py (view file @ 21cb2aa6)

@@ -30,9 +30,9 @@ class Custom3DDataset(Dataset):
         to its original format then converted them to `box_type_3d`.
         Defaults to 'LiDAR'. Available options includes

-        - 'LiDAR': box in LiDAR coordinates
-        - 'Depth': box in depth coordinates, usually for indoor dataset
-        - 'Camera': box in camera coordinates
+        - 'LiDAR': Box in LiDAR coordinates.
+        - 'Depth': Box in depth coordinates, usually for indoor dataset.
+        - 'Camera': Box in camera coordinates.
     filter_empty_gt (bool, optional): Whether to filter empty GT.
         Defaults to True.
     test_mode (bool, optional): Whether the dataset is in test mode.
...
@@ -87,10 +87,10 @@ class Custom3DDataset(Dataset):
             dict: Data information that will be passed to the data \
                 preprocessing pipelines. It includes the following keys:

-                - sample_idx (str): sample index
-                - pts_filename (str): filename of point clouds
-                - file_name (str): filename of point clouds
-                - ann_info (dict): annotation info
+                - sample_idx (str): Sample index.
+                - pts_filename (str): Filename of point clouds.
+                - file_name (str): Filename of point clouds.
+                - ann_info (dict): Annotation info.
         """
         info = self.data_infos[index]
         sample_idx = info['point_cloud']['lidar_idx']
...
@@ -114,15 +114,15 @@ class Custom3DDataset(Dataset):
         Args:
             results (dict): Dict before data preprocessing.

-                - img_fields (list): image fields
-                - bbox3d_fields (list): 3D bounding boxes fields
-                - pts_mask_fields (list): mask fields of points
-                - pts_seg_fields (list): mask fields of point segments
-                - bbox_fields (list): fields of bounding boxes
-                - mask_fields (list): fields of masks
-                - seg_fields (list): segment fields
-                - box_type_3d (str): 3D box type
-                - box_mode_3d (str): 3D box mode
+                - img_fields (list): Image fields.
+                - bbox3d_fields (list): 3D bounding boxes fields.
+                - pts_mask_fields (list): Mask fields of points.
+                - pts_seg_fields (list): Mask fields of point segments.
+                - bbox_fields (list): Fields of bounding boxes.
+                - mask_fields (list): Fields of masks.
+                - seg_fields (list): Segment fields.
+                - box_type_3d (str): 3D box type.
+                - box_mode_3d (str): 3D box mode.
         """
         results['img_fields'] = []
         results['bbox3d_fields'] = []
...
@@ -179,7 +179,7 @@ class Custom3DDataset(Dataset):
             a tuple or list, override the CLASSES defined by the dataset.

         Return:
-            list[str]: return the list of class names
+            list[str]: A list of class names.
         """
         if classes is None:
             return cls.CLASSES
mmdet3d/datasets/kitti2d_dataset.py (view file @ 21cb2aa6)

@@ -80,10 +80,10 @@ class Kitti2DDataset(CustomDataset):
             index (int): Index of the annotation data to get.

         Returns:
-            dict: annotation information consists of the following keys:
+            dict: Annotation information consists of the following keys:

-                - bboxes (np.ndarray): ground truth bboxes
-                - labels (np.ndarray): labels of ground truths
+                - bboxes (np.ndarray): Ground truth bboxes.
+                - labels (np.ndarray): Labels of ground truths.
         """
         # Use index to get the annos, thus the evalhook could also use this api
         info = self.data_infos[index]
mmdet3d/datasets/kitti_dataset.py (view file @ 21cb2aa6)

@@ -37,9 +37,9 @@ class KittiDataset(Custom3DDataset):
         to its original format then converted them to `box_type_3d`.
         Defaults to 'LiDAR' in this dataset. Available options includes

-        - 'LiDAR': box in LiDAR coordinates
-        - 'Depth': box in depth coordinates, usually for indoor dataset
-        - 'Camera': box in camera coordinates
+        - 'LiDAR': Box in LiDAR coordinates.
+        - 'Depth': Box in depth coordinates, usually for indoor dataset.
+        - 'Camera': Box in camera coordinates.
     filter_empty_gt (bool, optional): Whether to filter empty GT.
         Defaults to True.
     test_mode (bool, optional): Whether the dataset is in test mode.
...
@@ -88,13 +88,13 @@ class KittiDataset(Custom3DDataset):
             dict: Data information that will be passed to the data \
                 preprocessing pipelines. It includes the following keys:

-                - sample_idx (str): sample index
-                - pts_filename (str): filename of point clouds
-                - img_prefix (str | None): prefix of image files
-                - img_info (dict): image info
-                - lidar2img (list[np.ndarray], optional): transformations \
-                    from lidar to different cameras
-                - ann_info (dict): annotation info
+                - sample_idx (str): Sample index.
+                - pts_filename (str): Filename of point clouds.
+                - img_prefix (str | None): Prefix of image files.
+                - img_info (dict): Image info.
+                - lidar2img (list[np.ndarray], optional): Transformations \
+                    from lidar to different cameras.
+                - ann_info (dict): Annotation info.
         """
         info = self.data_infos[index]
         sample_idx = info['image']['image_idx']
...
@@ -131,11 +131,11 @@ class KittiDataset(Custom3DDataset):
             dict: annotation information consists of the following keys:

-                - gt_bboxes_3d (:obj:`LiDARInstance3DBoxes`): \
-                    3D ground truth bboxes
-                - gt_labels_3d (np.ndarray): labels of ground truths
-                - gt_bboxes (np.ndarray): 2D ground truth bboxes
-                - gt_labels (np.ndarray): labels of ground truths
-                - gt_names (list[str]): class names of ground truths
+                - gt_bboxes_3d (:obj:`LiDARInstance3DBoxes`): \
+                    3D ground truth bboxes.
+                - gt_labels_3d (np.ndarray): Labels of ground truths.
+                - gt_bboxes (np.ndarray): 2D ground truth bboxes.
+                - gt_labels (np.ndarray): Labels of ground truths.
+                - gt_names (list[str]): Class names of ground truths.
         """
         # Use index to get the annos, thus the evalhook could also use this api
         info = self.data_infos[index]
...
@@ -308,7 +308,7 @@ class KittiDataset(Custom3DDataset):
                 Default: None.

         Returns:
-            dict[str, float]: results of each evaluation metric
+            dict[str, float]: Results of each evaluation metric.
         """
         result_files, tmp_dir = self.format_results(results, pklfile_prefix)
         from mmdet3d.core.evaluation import kitti_eval
...
@@ -629,7 +629,7 @@ class KittiDataset(Custom3DDataset):
         """Results visualization.

         Args:
-            results (list[dict]): list of bounding boxes results.
+            results (list[dict]): List of bounding boxes results.
             out_dir (str): Output directory of visualization result.
         """
         assert out_dir is not None, 'Expect out_dir, got none.'
mmdet3d/datasets/lyft_dataset.py (view file @ 21cb2aa6)

@@ -16,7 +16,7 @@ from .custom_3d import Custom3DDataset
 @DATASETS.register_module()
 class LyftDataset(Custom3DDataset):
-    """Lyft Dataset.
+    r"""Lyft Dataset.

     This class serves as the API for experiments on the Lyft Dataset.
...
@@ -40,9 +40,9 @@ class LyftDataset(Custom3DDataset):
         to its original format then converted them to `box_type_3d`.
         Defaults to 'LiDAR' in this dataset. Available options includes

-        - 'LiDAR': box in LiDAR coordinates
-        - 'Depth': box in depth coordinates, usually for indoor dataset
-        - 'Camera': box in camera coordinates
+        - 'LiDAR': Box in LiDAR coordinates.
+        - 'Depth': Box in depth coordinates, usually for indoor dataset.
+        - 'Camera': Box in camera coordinates.
     filter_empty_gt (bool, optional): Whether to filter empty GT.
         Defaults to True.
     test_mode (bool, optional): Whether the dataset is in test mode.
...
@@ -185,12 +185,12 @@ class LyftDataset(Custom3DDataset):
             index (int): Index of the annotation data to get.

         Returns:
-            dict: annotation information consists of the following keys:
+            dict: Annotation information consists of the following keys:

-                - gt_bboxes_3d (:obj:`LiDARInstance3DBoxes`): \
-                    3D ground truth bboxes
-                - gt_labels_3d (np.ndarray): labels of ground truths
-                - gt_names (list[str]): class names of ground truths
+                - gt_bboxes_3d (:obj:`LiDARInstance3DBoxes`): \
+                    3D ground truth bboxes.
+                - gt_labels_3d (np.ndarray): Labels of ground truths.
+                - gt_names (list[str]): Class names of ground truths.
         """
         info = self.data_infos[index]
         gt_bboxes_3d = info['gt_boxes']