OpenDAS / mmdetection3d

Commit 0fdcb06d
Authored May 14, 2020 by liyinhao
change names, fix sunrgbd bboxes_lidar_bottom bug
Parent: f0e332bd

Showing 2 changed files with 49 additions and 45 deletions:
    mmdet3d/core/evaluation/indoor_eval.py  (+39, -38)
    tests/test_sunrgbd_dataset.py  (+10, -7)
mmdet3d/core/evaluation/indoor_eval.py
@@ -56,9 +56,7 @@ def average_precision(recalls, precisions, mode='area'):
     Returns:
         float or ndarray: calculated average precision
     """
-    no_scale = False
     if recalls.ndim == 1:
-        no_scale = True
         recalls = recalls[np.newaxis, :]
         precisions = precisions[np.newaxis, :]
     assert recalls.shape == precisions.shape and recalls.ndim == 2
@@ -85,8 +83,6 @@ def average_precision(recalls, precisions, mode='area'):
     else:
         raise ValueError(
             'Unrecognized mode, only "area" and "11points" are supported')
-    if no_scale:
-        ap = ap[0]
     return ap
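Note: with no_scale gone, average_precision no longer unwraps the result for 1-D inputs, so a single recall/precision curve now comes back as a length-1 array instead of a scalar. For reference, a minimal single-scale sketch of the "area" mode the function implements (an illustration, not the project's exact code):

import numpy as np

def area_ap(recalls, precisions):
    # Pad with sentinels, take the running-max precision envelope, then
    # integrate precision over recall (VOC-style 'area' AP).
    mrec = np.hstack(([0.0], recalls, [1.0]))
    mpre = np.hstack(([0.0], precisions, [0.0]))
    for i in range(mpre.size - 1, 0, -1):
        mpre[i - 1] = max(mpre[i - 1], mpre[i])
    idx = np.where(mrec[1:] != mrec[:-1])[0]
    return np.sum((mrec[idx + 1] - mrec[idx]) * mpre[idx + 1])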
@@ -100,9 +96,9 @@ def eval_det_cls(pred, gt, ovthresh=None):
         ovthresh (List[float]): a list, iou threshold

     Return:
-        rec (ndarray): numpy array of length nd
-        prec (ndarray): numpy array of length nd
-        ap (float): scalar, average precision
+        ndarray: numpy array of length nd
+        ndarray: numpy array of length nd
+        float: scalar, average precision
     """
     # construct gt objects
@@ -186,40 +182,32 @@ def eval_det_cls(pred, gt, ovthresh=None):
         # compute precision recall
         fp = np.cumsum(fp_thresh[iou_idx])
         tp = np.cumsum(tp_thresh[iou_idx])
-        rec = tp / float(npos)
+        recall = tp / float(npos)
         # avoid divide by zero in case the first detection matches a difficult
         # ground truth
-        prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
-        ap = average_precision(rec, prec)
-        ret.append((rec, prec, ap))
+        precision = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
+        ap = average_precision(recall, precision)
+        ret.append((recall, precision, ap))

     return ret


-def eval_det_cls_wrapper(arguments):
-    pred, gt, ovthresh = arguments
-    ret = eval_det_cls(pred, gt, ovthresh)
-    return ret
-
-
-def eval_map_rec(det_infos, gt_infos, ovthresh=None):
+def eval_map_recall(det_infos, gt_infos, ovthresh=None):
     """Evaluate mAP and Recall.

     Generic functions to compute precision/recall for object detection
     for multiple classes.

     Args:
-        pred_all (dict): map of {img_id: [(classname, bbox, score)]}.
-        gt_all (dict): map of {img_id: [(classname, bbox)]}.
+        det_infos (List): Label, bbox and score of the detection result.
+        gt_infos (List[dict]): Information of the ground truth.
         ovthresh (List[float]): iou threshold.
             Default: None.
-        get_iou_func (func): The function to get iou.
-            Default: get_iou_gpu.

     Return:
-        rec (dict): {classname: rec}.
-        prec (dict): {classname: prec_all}.
-        ap (dict): {classname: scalar}.
+        dict: {classname: rec}.
+        dict: {classname: prec_all}.
+        dict: {classname: scalar}.
     """
     pred_all = {}
     gt_all = {}
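Note: to make the renamed quantities concrete, here is a toy run of the cumsum-based precision/recall computation above; the match flags are made up (in the real code they come from IoU matching against npos ground truths):

import numpy as np

# Five detections sorted by score, matched against npos = 3 ground truths.
tp_flags = np.array([1, 0, 1, 1, 0])
fp_flags = 1 - tp_flags
tp = np.cumsum(tp_flags)  # [1 1 2 3 3]
fp = np.cumsum(fp_flags)  # [0 1 1 1 2]
npos = 3
recall = tp / float(npos)  # [0.333 0.333 0.667 1.0 1.0]
precision = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
# precision -> [1.0 0.5 0.667 0.75 0.6]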
@@ -251,6 +239,7 @@ def eval_map_rec(det_infos, gt_infos, ovthresh=None):
             if img_id not in gt[label]:
                 gt[int(label)][img_id] = []
             pred[int(label)][img_id].append((bbox, score))
+
     for img_id in gt_all.keys():
         for label, bbox in gt_all[img_id]:
             if label not in gt:
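Note: this hunk's context is the code that populates nested {label: {img_id: [...]}} dicts before per-class evaluation. A self-contained sketch of that regrouping with hypothetical detections (the committed code uses explicit membership checks rather than setdefault):

# img_id -> [(label, bbox, score)], with bboxes shortened to tuples.
dets_by_img = {0: [(0, (0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0), 0.9),
                   (1, (1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0), 0.8)]}
pred = {}
for img_id, dets in dets_by_img.items():
    for label, bbox, score in dets:
        pred.setdefault(int(label), {}).setdefault(img_id, []).append(
            (bbox, score))
print(sorted(pred.keys()))  # [0, 1]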
@@ -260,25 +249,25 @@ def eval_map_rec(det_infos, gt_infos, ovthresh=None):
             gt[label][img_id].append(bbox)

     ret_values = []
-    args = [(pred[classname], gt[classname], ovthresh)
-            for classname in gt.keys() if classname in pred]
-    rec = [{} for i in ovthresh]
-    prec = [{} for i in ovthresh]
+    for classname in gt.keys():
+        if classname in pred:
+            ret_values.append(
+                eval_det_cls(pred[classname], gt[classname], ovthresh))
+    recall = [{} for i in ovthresh]
+    precision = [{} for i in ovthresh]
     ap = [{} for i in ovthresh]
-    for arg in args:
-        ret_values.append(eval_det_cls_wrapper(arg))
     for i, label in enumerate(gt.keys()):
         for iou_idx, thresh in enumerate(ovthresh):
             if label in pred:
-                rec[iou_idx][label], prec[iou_idx][label], ap[iou_idx][
-                    label] = ret_values[i][iou_idx]
+                recall[iou_idx][label], precision[iou_idx][label], ap[iou_idx][
+                    label] = ret_values[i][iou_idx]
             else:
-                rec[iou_idx][label] = 0
-                prec[iou_idx][label] = 0
+                recall[iou_idx][label] = 0
+                precision[iou_idx][label] = 0
                 ap[iou_idx][label] = 0

-    return rec, prec, ap
+    return recall, precision, ap


 def indoor_eval(gt_annos, dt_annos, metric, label2cat):
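Note: dropping eval_det_cls_wrapper removes a layer of argument packing without changing results; the old and new forms are equivalent, as this stub-based sketch shows:

# Stub standing in for the real eval_det_cls in indoor_eval.py.
def eval_det_cls(pred_cls, gt_cls, ovthresh=None):
    return (pred_cls, gt_cls, tuple(ovthresh))

pred = {0: 'p0', 2: 'p2'}
gt = {0: 'g0', 1: 'g1', 2: 'g2'}
ovthresh = [0.25, 0.5]

# Old form: pack per-class args, call through the wrapper.
args = [(pred[c], gt[c], ovthresh) for c in gt.keys() if c in pred]
old = [eval_det_cls(*arg) for arg in args]

# New form, as committed: call eval_det_cls inline.
new = []
for classname in gt.keys():
    if classname in pred:
        new.append(eval_det_cls(pred[classname], gt[classname], ovthresh))

assert old == new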
@@ -293,9 +282,8 @@ def indoor_eval(gt_annos, dt_annos, metric, label2cat):
         label2cat (dict): {label: cat}.

     Return:
-        ret_dict (dict): Dict of results.
+        dict: Dict of results.
     """
     for gt_anno in gt_annos:
         if gt_anno['gt_num'] != 0:
             # convert to lidar coor for evaluation
@@ -304,10 +292,23 @@ def indoor_eval(gt_annos, dt_annos, metric, label2cat):
             if gt_anno['gt_boxes_upright_depth'].shape[-1] == 6:
                 gt_anno['gt_boxes_upright_depth'] = np.pad(
                     bbox_lidar_bottom, ((0, 0), (0, 1)), 'constant')
+            else:
+                gt_anno['gt_boxes_upright_depth'] = bbox_lidar_bottom
+    # gt_infos = []
+    # for gt_anno in gt_annos:
+    #     if gt_anno['gt_num'] != 0:
+    #         # convert to lidar coor for evaluation
+    #         bbox_lidar_bottom = boxes3d_depth_to_lidar(
+    #             gt_anno['gt_boxes_upright_depth'], mid_to_bottom=True)
+    #         if bbox_lidar_bottom.shape[-1] == 6:
+    #             bbox_lidar_bottom= np.pad(
+    #                 bbox_lidar_bottom, ((0, 0), (0, 1)), 'constant')
+    #         for i in range(gt_anno['gt_num']):
+    #             gt_infos.append([gt_anno['class'][i], bbox_lidar_bottom[i]])

     result_str = str()
     result_str += 'mAP'
-    rec, prec, ap = eval_map_rec(dt_annos, gt_annos, metric)
+    rec, prec, ap = eval_map_recall(dt_annos, gt_annos, metric)
     ret_dict = {}
     for i, iou_thresh in enumerate(metric):
         for label in ap[i].keys():
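Note: this hunk is the bboxes_lidar_bottom fix named in the commit title. Previously the converted boxes were written back only in the 6-dim branch (where a zero yaw column gets padded on), so 7-dim ground truths were converted and then silently discarded. A small illustration of the pad step:

import numpy as np

# A 6-dim box (no yaw) gains a zero yaw column so downstream code can
# always assume 7 values per box.
boxes6 = np.array([[4.168696, -1.047307, -1.231666, 1.887584, 2.30207,
                    1.969614]])
boxes7 = np.pad(boxes6, ((0, 0), (0, 1)), 'constant')
print(boxes7.shape)   # (1, 7)
print(boxes7[0, -1])  # 0.0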
tests/test_sunrgbd_dataset.py
@@ -70,13 +70,16 @@ def test_evaluate():
     sunrgbd_dataset = SUNRGBDDataset(root_path, ann_file)
     results = []
     pred_boxes = dict()
-    pred_boxes['box3d_lidar'] = np.array([[
-        1.047307, 4.168696, -0.246859, 2.30207, 1.887584, 1.969614, 1.69564944
-    ], [
-        2.583086, 4.811675, -0.786667, 0.585172, 0.883176, 0.973334, 1.64999513
-    ], [
-        -1.086364, 1.904545, -0.147727, 0.71281, 1.563134, 2.104546, 0.1022069
-    ]])
+    pred_boxes['box3d_lidar'] = np.array(
+        [[
+            4.168696, -1.047307, -1.231666, 1.887584, 2.30207, 1.969614,
+            1.69564944
+        ],
+         [
+             4.811675, -2.583086, -1.273334, 0.883176, 0.585172, 0.973334,
+             1.64999513
+         ],
+         [1.904545, 1.086364, -1.2, 1.563134, 0.71281, 2.104546, 0.1022069]])
     pred_boxes['label_preds'] = torch.Tensor([0, 7, 6]).cuda()
     pred_boxes['scores'] = torch.Tensor([0.5, 1.0, 1.0]).cuda()
     results.append([pred_boxes])
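Note: the updated expected values are the old depth-frame boxes re-expressed in the lidar frame with a bottom-centered z. Assuming the (x, y, z_center) -> (y, -x, z_center - h/2) convention, with the two horizontal extents swapping places, that these numbers imply (boxes3d_depth_to_lidar in the repo is the authoritative conversion), the first row checks out:

import numpy as np

def depth_to_lidar_bottom(box):
    # Hypothetical helper mirroring boxes3d_depth_to_lidar(
    #     ..., mid_to_bottom=True); for illustration only.
    x, y, z, l, w, h, ry = box
    return np.array([y, -x, z - h / 2.0, w, l, h, ry])

old_row = np.array([1.047307, 4.168696, -0.246859, 2.30207, 1.887584,
                    1.969614, 1.69564944])
print(depth_to_lidar_bottom(old_row))
# [ 4.168696 -1.047307 -1.231666  1.887584  2.30207   1.969614  1.69564944]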