Commit 2b3998cf authored by liyinhao

change scannet and sunrgbd dataset unittest

parent ae0d138d
......@@ -5,7 +5,7 @@ from mmdet3d.core.bbox.iou_calculators.iou3d_calculator import bbox_overlaps_3d
def boxes3d_depth_to_lidar(boxes3d, mid_to_bottom=True):
""" Boxes3d Depth to Lidar.
"""Boxes3d Depth to Lidar.
Flip X-right,Y-forward,Z-up to X-forward,Y-left,Z-up.
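For context, the axis flip named in this docstring can be summarized compactly. A minimal sketch, assuming plain (N, 3) points and ignoring the `mid_to_bottom` z-shift that the full `boxes3d_depth_to_lidar` handles: depth coordinates (X-right, Y-forward, Z-up) map to lidar coordinates (X-forward, Y-left, Z-up) via x_lidar = y_depth and y_lidar = -x_depth, with Z unchanged. The helper name below is illustrative, not part of this file.

import numpy as np

def flip_depth_to_lidar_xyz(xyz_depth):
    # xyz_depth: (N, 3) points in depth coordinates (X-right, Y-forward, Z-up)
    xyz_lidar = xyz_depth.copy()
    xyz_lidar[:, 0] = xyz_depth[:, 1]    # lidar X (forward) <- depth Y (forward)
    xyz_lidar[:, 1] = -xyz_depth[:, 0]   # lidar Y (left)    <- -(depth X, which points right)
    return xyz_lidar                     # Z (up) is unchanged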
......@@ -86,14 +86,14 @@ def average_precision(recalls, precisions, mode='area'):
return ap
def eval_det_cls(pred, gt, ovthresh=None):
def eval_det_cls(pred, gt, iou_thr=None):
"""Generic functions to compute precision/recall for object detection
for a single class.
Args:
pred (dict): {img_id: [(bbox, score)]} where bbox is numpy array.
gt (dict): {img_id: [bbox]}.
ovthresh (List[float]): a list, iou threshold.
iou_thr (List[float]): a list, iou threshold.
Return:
ndarray: numpy array of length nd.
......@@ -106,7 +106,7 @@ def eval_det_cls(pred, gt, ovthresh=None):
npos = 0
for img_id in gt.keys():
bbox = np.array(gt[img_id])
det = [[False] * len(bbox) for i in ovthresh]
det = [[False] * len(bbox) for i in iou_thr]
npos += len(bbox)
class_recs[img_id] = {'bbox': bbox, 'det': det}
# pad empty list to all other imgids
......@@ -150,11 +150,11 @@ def eval_det_cls(pred, gt, ovthresh=None):
# go down dets and mark TPs and FPs
nd = len(image_ids)
tp_thresh = [np.zeros(nd) for i in ovthresh]
fp_thresh = [np.zeros(nd) for i in ovthresh]
tp_thr = [np.zeros(nd) for i in iou_thr]
fp_thr = [np.zeros(nd) for i in iou_thr]
for d in range(nd):
R = class_recs[image_ids[d]]
ovmax = -np.inf
iou_max = -np.inf
BBGT = R['bbox'].astype(float)
cur_iou = ious[d]
......@@ -163,25 +163,25 @@ def eval_det_cls(pred, gt, ovthresh=None):
for j in range(BBGT.shape[0]):
# iou = get_iou_main(get_iou_func, (bb, BBGT[j,...]))
iou = cur_iou[j]
if iou > ovmax:
ovmax = iou
if iou > iou_max:
iou_max = iou
jmax = j
for iou_idx, thresh in enumerate(ovthresh):
if ovmax > thresh:
for iou_idx, thresh in enumerate(iou_thr):
if iou_max > thresh:
if not R['det'][iou_idx][jmax]:
tp_thresh[iou_idx][d] = 1.
tp_thr[iou_idx][d] = 1.
R['det'][iou_idx][jmax] = 1
else:
fp_thresh[iou_idx][d] = 1.
fp_thr[iou_idx][d] = 1.
else:
fp_thresh[iou_idx][d] = 1.
fp_thr[iou_idx][d] = 1.
ret = []
for iou_idx, thresh in enumerate(ovthresh):
for iou_idx, thresh in enumerate(iou_thr):
# compute precision recall
fp = np.cumsum(fp_thresh[iou_idx])
tp = np.cumsum(tp_thresh[iou_idx])
fp = np.cumsum(fp_thr[iou_idx])
tp = np.cumsum(tp_thr[iou_idx])
recall = tp / float(npos)
# avoid divide by zero in case the first detection matches a difficult
# ground truth
......
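For reference, here is a minimal, self-contained sketch of the bookkeeping pattern that the renamed variables above (iou_thr, tp_thr, fp_thr, iou_max) implement: one TP/FP array per IoU threshold, filled while walking detections in confidence order, then cumulated into recall and precision. It is an illustrative reconstruction, not the exact mmdet3d code; eval_single_class and its arguments are made-up names, and it collapses the per-image grouping of the real function into a single pool of GT boxes.

import numpy as np

def eval_single_class(scores, ious_to_gt, num_gt, iou_thr=(0.25, 0.5)):
    # scores: (nd,) detection confidences for one class
    # ious_to_gt: (nd, num_gt) IoU of each detection against every GT box
    # num_gt: number of ground-truth boxes ("npos" in the hunks above)
    order = np.argsort(-scores)                      # walk detections by confidence
    nd = len(scores)
    tp_thr = [np.zeros(nd) for _ in iou_thr]
    fp_thr = [np.zeros(nd) for _ in iou_thr]
    matched = [np.zeros(num_gt, dtype=bool) for _ in iou_thr]   # "R['det']" above

    for d, det_idx in enumerate(order):
        iou_row = ious_to_gt[det_idx]
        jmax = int(np.argmax(iou_row)) if num_gt > 0 else -1
        iou_max = iou_row[jmax] if num_gt > 0 else -np.inf
        for k, thr in enumerate(iou_thr):
            if iou_max > thr and not matched[k][jmax]:
                tp_thr[k][d] = 1.0                   # first match of this GT box: TP
                matched[k][jmax] = True
            else:
                fp_thr[k][d] = 1.0                   # duplicate match or low IoU: FP

    results = []
    for k in range(len(iou_thr)):
        fp = np.cumsum(fp_thr[k])
        tp = np.cumsum(tp_thr[k])
        recall = tp / float(max(num_gt, 1))
        # small epsilon avoids divide-by-zero, as in the hunk above
        precision = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
        results.append((recall, precision))
    return results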
import copy
import mmcv
import numpy as np
import torch.utils.data as torch_data
......@@ -26,8 +24,6 @@ class IndoorBaseDataset(torch_data.Dataset):
mmcv.check_file_exist(ann_file)
self.data_infos = mmcv.load(ann_file)
# dataset config
self.num_class = len(self.CLASSES)
if pipeline is not None:
self.pipeline = Compose(pipeline)
self.with_label = with_label
......@@ -79,7 +75,8 @@ class IndoorBaseDataset(torch_data.Dataset):
@classmethod
def get_classes(cls, classes=None):
"""Get class names of current dataset
"""Get class names of current dataset.
Args:
classes (Sequence[str] | str | None): If classes is None, use
default CLASSES defined by builtin dataset. If classes is a
......@@ -116,7 +113,7 @@ class IndoorBaseDataset(torch_data.Dataset):
box3d_depth = pred_boxes['box3d_lidar']
if box3d_depth is not None:
label_preds = pred_boxes['label_preds']
scores = pred_boxes['scores'].detach().cpu().numpy()
scores = pred_boxes['scores']
label_preds = label_preds.detach().cpu().numpy()
num_proposal = box3d_depth.shape[0]
for j in range(num_proposal):
......@@ -149,6 +146,6 @@ class IndoorBaseDataset(torch_data.Dataset):
results = self.format_results(results)
from mmdet3d.core.evaluation import indoor_eval
assert len(metric) > 0
gt_annos = [copy.deepcopy(info['annos']) for info in self.data_infos]
gt_annos = [info['annos'] for info in self.data_infos]
ret_dict = indoor_eval(gt_annos, results, metric, self.label2cat)
return ret_dict
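The `get_classes` docstring in the hunk above is truncated here. For context, a hedged sketch of the typical mmdet-style resolution of the `classes` argument, which the new ScanNet/SUNRGBD tests further down exercise: None falls back to the builtin CLASSES, a str is read as a text file with one name per line, and a list/tuple is used as-is. The mixin class name below is illustrative.

import mmcv

class _ClassesMixin:
    CLASSES = None  # overridden by concrete datasets

    @classmethod
    def get_classes(cls, classes=None):
        if classes is None:
            return cls.CLASSES
        if isinstance(classes, str):
            # take the string as a path to a text file, one class name per line
            return mmcv.list_from_file(classes)
        if isinstance(classes, (tuple, list)):
            return classes
        raise ValueError(f'Unsupported type {type(classes)} of classes.')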
......@@ -34,11 +34,11 @@ class ScanNetDataset(IndoorBaseDataset):
if info['annos']['gt_num'] != 0:
gt_bboxes_3d = info['annos']['gt_boxes_upright_depth'] # k, 6
gt_labels = info['annos']['class']
gt_bboxes_3d_mask = np.ones_like(gt_labels).astype(np.bool)
gt_bboxes_3d_mask = np.ones_like(gt_labels, dtype=np.bool)
else:
gt_bboxes_3d = np.zeros((1, 6), dtype=np.float32)
gt_labels = np.zeros(1, ).astype(np.bool)
gt_bboxes_3d_mask = np.zeros(1, ).astype(np.bool)
gt_labels = np.zeros(1, dtype=np.bool)
gt_bboxes_3d_mask = np.zeros(1, dtype=np.bool)
pts_instance_mask_path = osp.join(self.root_path,
f'{sample_idx}_ins_label.npy')
pts_semantic_mask_path = osp.join(self.root_path,
......
......@@ -33,11 +33,11 @@ class SUNRGBDDataset(IndoorBaseDataset):
if info['annos']['gt_num'] != 0:
gt_bboxes_3d = info['annos']['gt_boxes_upright_depth'] # k, 6
gt_labels = info['annos']['class']
gt_bboxes_3d_mask = np.ones_like(gt_labels).astype(np.bool)
gt_bboxes_3d_mask = np.ones_like(gt_labels, dtype=np.bool)
else:
gt_bboxes_3d = np.zeros((1, 6), dtype=np.float32)
gt_labels = np.zeros(1, ).astype(np.bool)
gt_bboxes_3d_mask = np.zeros(1, ).astype(np.bool)
gt_labels = np.zeros(1, dtype=np.bool)
gt_bboxes_3d_mask = np.zeros(1, dtype=np.bool)
anns_results = dict(
gt_bboxes_3d=gt_bboxes_3d,
......
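Both dataset hunks above make the same change to the annotation masks: build the boolean arrays in one step with dtype= instead of allocating an array and casting it. A condensed sketch of that branch, using the builtin bool here since the np.bool alias is deprecated in recent NumPy releases; the helper name is made up.

import numpy as np

def build_gt_fields(annos):
    # annos: one info['annos'] dict as used in the dataset hunks above
    if annos['gt_num'] != 0:
        gt_bboxes_3d = annos['gt_boxes_upright_depth']           # (k, 6)
        gt_labels = annos['class']
        gt_bboxes_3d_mask = np.ones_like(gt_labels, dtype=bool)  # all boxes valid
    else:
        # no annotations: one dummy box with an all-False mask
        gt_bboxes_3d = np.zeros((1, 6), dtype=np.float32)
        gt_labels = np.zeros(1, dtype=bool)
        gt_bboxes_3d_mask = np.zeros(1, dtype=bool)
    return gt_bboxes_3d, gt_labels, gt_bboxes_3d_mask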
......@@ -63,13 +63,36 @@ def test_getitem():
])
expected_pts_semantic_mask = np.array([3, 1, 2, 2, 15])
expected_pts_instance_mask = np.array([44, 22, 10, 10, 57])
original_classes = scannet_dataset.CLASSES
assert scannet_dataset.CLASSES == class_names
assert np.allclose(points, expected_points)
assert gt_bboxes_3d[:5].shape == (5, 6)
assert np.allclose(gt_bboxes_3d[:5], expected_gt_bboxes_3d)
assert np.all(gt_labels.numpy() == expected_gt_labels)
assert np.all(pts_semantic_mask.numpy() == expected_pts_semantic_mask)
assert np.all(pts_instance_mask.numpy() == expected_pts_instance_mask)
assert original_classes == class_names
scannet_dataset = ScanNetDataset(
root_path, ann_file, pipeline=None, classes=['cabinet', 'bed'])
assert scannet_dataset.CLASSES != original_classes
assert scannet_dataset.CLASSES == ['cabinet', 'bed']
scannet_dataset = ScanNetDataset(
root_path, ann_file, pipeline=None, classes=('cabinet', 'bed'))
assert scannet_dataset.CLASSES != original_classes
assert scannet_dataset.CLASSES == ('cabinet', 'bed')
import tempfile
tmp_file = tempfile.NamedTemporaryFile()
with open(tmp_file.name, 'w') as f:
f.write('cabinet\nbed\n')
scannet_dataset = ScanNetDataset(
root_path, ann_file, pipeline=None, classes=tmp_file.name)
assert scannet_dataset.CLASSES != original_classes
assert scannet_dataset.CLASSES == ['cabinet', 'bed']
def test_evaluate():
......
......@@ -55,10 +55,32 @@ def test_getitem():
2.81404
]])
expected_gt_labels = np.array([0, 7, 6])
original_classes = sunrgbd_dataset.CLASSES
assert np.allclose(points, expected_points)
assert np.allclose(gt_bboxes_3d, expected_gt_bboxes_3d)
assert np.all(gt_labels.numpy() == expected_gt_labels)
assert original_classes == class_names
SUNRGBD_dataset = SUNRGBDDataset(
root_path, ann_file, pipeline=None, classes=['bed', 'table'])
assert SUNRGBD_dataset.CLASSES != original_classes
assert SUNRGBD_dataset.CLASSES == ['bed', 'table']
SUNRGBD_dataset = SUNRGBDDataset(
root_path, ann_file, pipeline=None, classes=('bed', 'table'))
assert SUNRGBD_dataset.CLASSES != original_classes
assert SUNRGBD_dataset.CLASSES == ('bed', 'table')
import tempfile
tmp_file = tempfile.NamedTemporaryFile()
with open(tmp_file.name, 'w') as f:
f.write('bed\ntable\n')
SUNRGBD_dataset = SUNRGBDDataset(
root_path, ann_file, pipeline=None, classes=tmp_file.name)
assert SUNRGBD_dataset.CLASSES != original_classes
assert SUNRGBD_dataset.CLASSES == ['bed', 'table']
def test_evaluate():
......
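Both new tests load custom classes from a file by writing to a tempfile.NamedTemporaryFile and reopening it by name. That reopen-while-open pattern works on POSIX systems but not on Windows, so a more portable sketch of the same fixture (the helper name is made up) could look like this:

import os
import tempfile

def write_classes_file(names):
    # write one class name per line and return the path; the caller removes it
    fd, path = tempfile.mkstemp(suffix='.txt')
    with os.fdopen(fd, 'w') as f:
        f.write('\n'.join(names) + '\n')
    return path

# usage, mirroring the tests above:
# path = write_classes_file(['cabinet', 'bed'])
# scannet_dataset = ScanNetDataset(root_path, ann_file, pipeline=None, classes=path)
# assert scannet_dataset.CLASSES == ['cabinet', 'bed']
# os.remove(path)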