"...git@developer.sourcefind.cn:OpenDAS/torch-cluster.git" did not exist on "abac33d313b92b4783367d016c87169b8cbce6b5"
Commit addc86ad authored by zhangwenwei

Merge branch 'fix_visualize' into 'master'

Fix visualize

See merge request open-mmlab/mmdet.3d!93
parents e8298d24 1ef8d566
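For context, a minimal usage sketch (mine, not part of this merge request), assuming a dataset object (KittiDataset or NuScenesDataset test split) and the matching detector results already exist; the fixed show() then dumps one <name>_points.obj, <name>_gt.ply and <name>_pred.ply per sample:

out_dir = './show_dir'          # hypothetical output directory
dataset.show(results, out_dir)  # dataset/results assumed to be built elsewhere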
@@ -81,13 +81,13 @@ def show_result(points, gt_bboxes, pred_bboxes, out_dir, filename):
     mmcv.mkdir_or_exist(out_dir)
     if gt_bboxes is not None:
-        gt_bboxes[:, -1] *= -1
+        gt_bboxes[:, 6] *= -1
         _write_oriented_bbox(gt_bboxes, osp.join(out_dir,
                                                  f'{filename}_gt.ply'))
     if points is not None:
         _write_ply(points, osp.join(out_dir, f'{filename}_points.obj'))
     if pred_bboxes is not None:
-        pred_bboxes[:, -1] *= -1
+        pred_bboxes[:, 6] *= -1
         _write_oriented_bbox(pred_bboxes,
                              osp.join(out_dir, f'{filename}_pred.ply'))
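The change above targets the yaw column explicitly. Presumably the motivation is that the boxes passed in are not always 7-column (x, y, z, dx, dy, dz, yaw) arrays; nuScenes-style boxes, for instance, carry extra velocity columns, so negating the last column would flip a velocity component rather than the heading. A small illustration of mine, with made-up values:

import numpy as np

# made-up 9-column box: x, y, z, dx, dy, dz, yaw, vx, vy
box = np.array([[1.0, 2.0, 0.5, 4.0, 2.0, 1.5, 0.3, 5.0, 0.1]])
box[:, 6] *= -1     # flips the yaw angle, as the fixed code does
# box[:, -1] *= -1  would have negated vy instead of the heading
print(box[:, 6])    # [-0.3]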
@@ -555,13 +555,11 @@ class KittiDataset(Custom3DDataset):
     def show(self, results, out_dir):
         assert out_dir is not None, 'Expect out_dir, got none.'
         for i, result in enumerate(results):
+            example = self.prepare_test_data(i)
             data_info = self.data_infos[i]
             pts_path = data_info['point_cloud']['velodyne_path']
             file_name = osp.split(pts_path)[-1].split('.')[0]
-            points = np.fromfile(
-                osp.join(self.root_split, 'velodyne_reduced',
-                         f'{file_name}.bin'),
-                dtype=np.float32).reshape(-1, 4)
+            points = example['points'][0]._data.numpy()
             points = points[..., [1, 0, 2]]
             points[..., 0] *= -1
             gt_bboxes = self.get_ann_info(i)['gt_bboxes_3d'].tensor
@@ -573,4 +571,3 @@ class KittiDataset(Custom3DDataset):
                                           Box3DMode.DEPTH)
             pred_bboxes[..., 2] += pred_bboxes[..., 5] / 2
             show_result(points, gt_bboxes, pred_bboxes, out_dir, file_name)
-            print(results)
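The dataset now reuses the points prepared by the test pipeline instead of re-reading the raw velodyne .bin files, while the axis handling for visualization stays the same: swap x and y, then negate the new first axis. A tiny stand-alone sketch of that transform, with illustrative values only:

import numpy as np

pts_lidar = np.array([[10.0, 2.0, -1.5]])  # velodyne frame: x forward, y left, z up
pts_viz = pts_lidar[..., [1, 0, 2]]        # fancy indexing returns a copy
pts_viz[..., 0] *= -1                      # negate the new first axis
print(pts_viz)                             # -> [[-2. 10. -1.5]]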
@@ -401,22 +401,23 @@ class NuScenesDataset(Custom3DDataset):
     def show(self, results, out_dir):
         for i, result in enumerate(results):
+            example = self.prepare_test_data(i)
+            points = example['points'][0]._data.numpy()
             data_info = self.data_infos[i]
             pts_path = data_info['lidar_path']
             file_name = osp.split(pts_path)[-1].split('.')[0]
-            points = np.fromfile(pts_path, dtype=np.float32).reshape(-1, 4)
             points = points[..., [1, 0, 2]]
             points[..., 0] *= -1
+            inds = result['pts_bbox']['scores_3d'] > 0.1
             gt_bboxes = self.get_ann_info(i)['gt_bboxes_3d'].tensor
             gt_bboxes = Box3DMode.convert(gt_bboxes, Box3DMode.LIDAR,
                                           Box3DMode.DEPTH)
             gt_bboxes[..., 2] += gt_bboxes[..., 5] / 2
-            pred_bboxes = result['boxes_3d'].tensor.numpy()
+            pred_bboxes = result['pts_bbox']['boxes_3d'][inds].tensor.numpy()
             pred_bboxes = Box3DMode.convert(pred_bboxes, Box3DMode.LIDAR,
                                             Box3DMode.DEPTH)
             pred_bboxes[..., 2] += pred_bboxes[..., 5] / 2
             show_result(points, gt_bboxes, pred_bboxes, out_dir, file_name)
-            print(results)
def output_to_nusc_box(detection):
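The nuScenes branch also adds a confidence filter before drawing predictions: only boxes whose scores_3d exceed 0.1 are kept. A stand-alone illustration with made-up values (the real boxes_3d is a box structure that supports the same boolean indexing before .tensor is taken):

import torch

scores_3d = torch.tensor([0.05, 0.42, 0.11])  # made-up confidence scores
boxes_3d = torch.zeros(3, 7)                  # three dummy 7-dof boxes
inds = scores_3d > 0.1                        # boolean mask, as in the diff above
print(boxes_3d[inds].shape)                   # torch.Size([2, 7])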
......
+import copy
 from os import path as osp
 from mmdet3d.core import Box3DMode, show_result
@@ -64,7 +65,7 @@ class Base3DDetector(BaseDetector):
         assert out_dir is not None, 'Expect out_dir, got none.'
-        pred_bboxes = result['pts_bbox']['boxes_3d'].tensor.numpy()
+        pred_bboxes = copy.deepcopy(result['boxes_3d'].tensor.numpy())
         # for now we convert points into depth mode
         if data['img_metas'][0]._data[0][0]['box_mode_3d'] != Box3DMode.DEPTH:
             points = points[..., [1, 0, 2]]
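The deepcopy here is presumably needed because the visualization path mutates the box array in place (the yaw flip and the z += height / 2 shift above), and .tensor.numpy() shares memory with the stored tensor, so in-place edits would corrupt the original detection results. A minimal sketch of the hazard, using made-up names:

import numpy as np

result_boxes = np.ones((1, 7))
viz_boxes = result_boxes           # no copy: both names share one buffer
viz_boxes[:, 6] *= -1              # silently edits result_boxes as well
viz_boxes = result_boxes.copy()    # analogous to copy.deepcopy(...) above
viz_boxes[:, 2] += 0.5             # result_boxes now stays untouched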