Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
mmdetection3d
Commits
7cd3060e
Commit
7cd3060e
authored
May 13, 2020
by
liyinhao
Browse files
finish sunrgbd
parent
8b3b1104
Changes
2
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
35 additions
and
3 deletions
+35
-3
mmdet3d/core/evaluation/indoor_utils/eval.py
mmdet3d/core/evaluation/indoor_utils/eval.py
+3
-3
tests/test_sunrgbd_dataset.py
tests/test_sunrgbd_dataset.py
+32
-0
No files found.
mmdet3d/core/evaluation/indoor_utils/eval.py
View file @
7cd3060e
...
@@ -386,9 +386,9 @@ def indoor_eval(gt_annos, dt_annos, metric, class2type):
...
@@ -386,9 +386,9 @@ def indoor_eval(gt_annos, dt_annos, metric, class2type):
# convert to lidar coor for evaluation
# convert to lidar coor for evaluation
bbox_lidar_bottom
=
boxes3d_depth_to_lidar
(
bbox_lidar_bottom
=
boxes3d_depth_to_lidar
(
gt_anno
[
'gt_boxes_upright_depth'
],
mid_to_bottom
=
True
)
gt_anno
[
'gt_boxes_upright_depth'
],
mid_to_bottom
=
True
)
gt_anno
[
'gt_boxes_upright_depth'
]
=
np
.
pad
(
bbox_lidar_bottom
,
if
gt_anno
[
'gt_boxes_upright_depth'
]
.
shape
[
-
1
]
==
6
:
((
0
,
0
),
(
0
,
1
)),
gt_anno
[
'gt_boxes_upright_depth'
]
=
np
.
pad
(
'constant'
)
bbox_lidar_bottom
,
((
0
,
0
),
(
0
,
1
)),
'constant'
)
ap_iou_thresholds
=
metric
[
'AP_IOU_THRESHHOLDS'
]
ap_iou_thresholds
=
metric
[
'AP_IOU_THRESHHOLDS'
]
ap_calculator
=
APCalculator
(
ap_iou_thresholds
,
class2type
)
ap_calculator
=
APCalculator
(
ap_iou_thresholds
,
class2type
)
ap_calculator
.
step
(
dt_annos
,
gt_annos
)
ap_calculator
.
step
(
dt_annos
,
gt_annos
)
...
...
tests/test_sunrgbd_dataset.py
View file @
7cd3060e
import
numpy
as
np
import
numpy
as
np
import
pytest
import
torch
from
mmdet3d.datasets
import
SunrgbdDataset
from
mmdet3d.datasets
import
SunrgbdDataset
...
@@ -57,3 +59,33 @@ def test_getitem():
...
@@ -57,3 +59,33 @@ def test_getitem():
assert
np
.
allclose
(
points
,
expected_points
)
assert
np
.
allclose
(
points
,
expected_points
)
assert
np
.
allclose
(
gt_bboxes_3d
,
expected_gt_bboxes_3d
)
assert
np
.
allclose
(
gt_bboxes_3d
,
expected_gt_bboxes_3d
)
assert
np
.
all
(
gt_labels
.
numpy
()
==
expected_gt_labels
)
assert
np
.
all
(
gt_labels
.
numpy
()
==
expected_gt_labels
)
def test_evaluate():
    """Smoke-test SunrgbdDataset.evaluate with three hand-crafted predictions.

    Feeds one frame of fake detections into the dataset's evaluate() and
    checks that the per-class Average Precision at IoU 0.25 is ~1.0 for the
    three ground-truth classes present in the tiny test annotation file.
    """
    # Evaluation path moves tensors through CUDA; skip on CPU-only machines.
    if not torch.cuda.is_available():
        pytest.skip()
    root_path = './tests/data/sunrgbd'
    ann_file = './tests/data/sunrgbd/sunrgbd_infos.pkl'
    dataset = SunrgbdDataset(root_path, ann_file)
    # One prediction dict per frame; each row presumably encodes
    # (x, y, z, dx, dy, dz, yaw) in lidar coordinates — TODO confirm.
    frame_pred = {
        'box3d_lidar': np.array([
            [1.047307, 4.168696, -0.246859, 2.30207,
             1.887584, 1.969614, 1.69564944],
            [2.583086, 4.811675, -0.786667, 0.585172,
             0.883176, 0.973334, 1.64999513],
            [-1.086364, 1.904545, -0.147727, 0.71281,
             1.563134, 2.104546, 0.1022069],
        ]),
        'label_preds': torch.Tensor([0, 7, 6]).cuda(),
        'scores': torch.Tensor([0.5, 1.0, 1.0]).cuda(),
    }
    results = [[frame_pred]]
    # NOTE(review): key spelling ('THRESHHOLDS') intentionally matches the
    # consumer in indoor_utils/eval.py — do not "fix" it here alone.
    metric = {'AP_IOU_THRESHHOLDS': [0.25, 0.5]}
    ap_dict = dataset.evaluate(results, metric)
    bed_precision_25 = ap_dict['bed Average Precision 25']
    dresser_precision_25 = ap_dict['dresser Average Precision 25']
    night_stand_precision_25 = ap_dict['night_stand Average Precision 25']
    # Each fake box should match its ground-truth box, giving AP ~= 1.
    assert abs(bed_precision_25 - 1) < 0.01
    assert abs(dresser_precision_25 - 1) < 0.01
    assert abs(night_stand_precision_25 - 1) < 0.01
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment