"git@developer.sourcefind.cn:OpenDAS/torchaudio.git" did not exist on "70987b01df1f3a0cac2e8d4cf7fa5fddab0be874"
Commit cda6a1a3 authored by liyinhao

create color module, delete some extra files, change npz to npy

parent c4b2f80b
@@ -5,7 +5,28 @@ import numpy as np
 from mmdet.datasets.registry import PIPELINES
 
 
-@PIPELINES.register_module
+@PIPELINES.register_module()
+class PointsColorNormalize(object):
+    """Points Color Normalize.
+
+    Normalize the color of the points.
+
+    Args:
+        color_mean (List[float]): Mean color of the point cloud.
+    """
+
+    def __init__(self, color_mean):
+        self.color_mean = color_mean
+
+    def __call__(self, results):
+        points = results.get('points', None)
+        assert points.shape[1] >= 6
+        points[:, 3:6] = points[:, 3:6] - np.array(self.color_mean) / 256.0
+        results['points'] = points
+        return results
+
+
+@PIPELINES.register_module()
 class LoadPointsFromFile(object):
     """Load Points From File.
 
@@ -13,32 +34,23 @@ class LoadPointsFromFile(object):
     Args:
         use_height (bool): Whether to use height.
-        color_mean (List[float]): Mean color of the point cloud.
         load_dim (int): The dimension of the loaded points.
             Default: 6.
         use_dim (List[int]): Which dimensions of the points to be used.
             Default: [0, 1, 2].
     """
 
-    def __init__(self, use_height, color_mean, load_dim=6, use_dim=[0, 1, 2]):
+    def __init__(self, use_height, load_dim=6, use_dim=[0, 1, 2]):
         self.use_height = use_height
-        self.color_mean = color_mean
         assert max(use_dim) < load_dim
         self.load_dim = load_dim
         self.use_dim = use_dim
 
     def __call__(self, results):
         pts_filename = results.get('pts_filename', None)
-        info = results.get('info', None)
-        name = 'scannet' if info.get('image', None) is None else 'sunrgbd'
         assert osp.exists(pts_filename)
-        if name == 'scannet':
-            points = np.load(pts_filename)
-        else:
-            points = np.load(pts_filename)['pc']
+        points = np.load(pts_filename)
         points = points.reshape(-1, self.load_dim)
-        if self.load_dim >= 6:
-            points[:, 3:6] = points[:, 3:6] - np.array(self.color_mean) / 256.0
         points = points[:, self.use_dim]
         if self.use_height:
 ...
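With color normalization split out of the loader, the two steps are now composed explicitly. Below is a minimal standalone sketch of the new calling convention, assuming both classes live in indoor_loading.py (the module the tests below import from); the .npy path, point count, and color_mean values are made up for illustration, not taken from this commit.

import numpy as np
# Assumption: both transforms are importable from the same module as the tests use.
from mmdet3d.datasets.pipelines.indoor_loading import (LoadPointsFromFile,
                                                       PointsColorNormalize)

# Fabricate a saved point cloud: 100 points with x, y, z, r, g, b.
np.save('/tmp/000001.npy', np.random.rand(100, 6).astype(np.float32))

# Keep all six dims so the color columns (3:6) survive for the normalize step.
load = LoadPointsFromFile(use_height=True, load_dim=6,
                          use_dim=[0, 1, 2, 3, 4, 5])
normalize = PointsColorNormalize(color_mean=[0.5, 0.5, 0.5])

results = dict(pts_filename='/tmp/000001.npy')
results = load(results)       # reads the .npy file and fills results['points']
results = normalize(results)  # shifts the color channels at columns 3:6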
@@ -8,21 +8,20 @@ from mmdet3d.datasets.pipelines.indoor_loading import (LoadAnnotations3D,
 def test_load_points_from_file():
     sunrgbd_info = mmcv.load('./tests/data/sunrgbd/sunrgbd_infos.pkl')
-    sunrgbd_load_points_from_file = LoadPointsFromFile(True, [0.5, 0.5, 0.5],
-                                                       6)
+    sunrgbd_load_points_from_file = LoadPointsFromFile(True, 6)
     sunrgbd_results = dict()
     data_path = './tests/data/sunrgbd/sunrgbd_trainval'
     sunrgbd_info = sunrgbd_info[0]
     scan_name = sunrgbd_info['point_cloud']['lidar_idx']
     sunrgbd_results['info'] = sunrgbd_info
     sunrgbd_results['pts_filename'] = osp.join(data_path, 'lidar',
-                                               '%06d.npz' % scan_name)
+                                               '%06d.npy' % scan_name)
     sunrgbd_results = sunrgbd_load_points_from_file(sunrgbd_results)
     sunrgbd_point_cloud = sunrgbd_results.get('points', None)
     assert sunrgbd_point_cloud.shape == (100, 4)
 
     scannet_info = mmcv.load('./tests/data/scannet/scannet_infos.pkl')
-    scannet_load_data = LoadPointsFromFile(True, [0.5, 0.5, 0.5])
+    scannet_load_data = LoadPointsFromFile(True)
     scannet_results = dict()
     data_path = './tests/data/scannet/scannet_train_instance_data'
     scannet_results['data_path'] = data_path
 ...
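Because both transforms carry @PIPELINES.register_module(), a config-driven pipeline can also refer to them by type name instead of importing the classes directly, as the tests do. A hypothetical pipeline fragment (color_mean and use_dim are illustrative only):

train_pipeline = [
    dict(type='LoadPointsFromFile', use_height=True, load_dim=6,
         use_dim=[0, 1, 2, 3, 4, 5]),
    dict(type='PointsColorNormalize', color_mean=[0.5, 0.5, 0.5]),
]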
@@ -117,9 +117,9 @@ class SUNRGBDData(object):
         # TODO : sample points in loading process and test
         pc_upright_depth_subsampled = random_sampling(
             pc_upright_depth, SAMPLE_NUM)
-        np.savez_compressed(
-            os.path.join(self.root_dir, 'lidar', '%06d.npz' % sample_idx),
-            pc=pc_upright_depth_subsampled)
+        np.save(
+            os.path.join(self.root_dir, 'lidar', '%06d.npy' % sample_idx),
+            pc_upright_depth_subsampled)
 
         info = dict()
         pc_info = {'num_features': 6, 'lidar_idx': sample_idx}
 ...
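The loader simplification earlier in this commit is what this converter change enables: np.savez_compressed writes a zip archive that np.load returns as an NpzFile, so the old loader had to index it with the 'pc' key, whereas np.save writes a bare .npy array that np.load returns directly. A quick illustration with throwaway paths:

import numpy as np

pts = np.random.rand(100, 6).astype(np.float32)

np.savez_compressed('/tmp/scan.npz', pc=pts)   # archive of named arrays
np.save('/tmp/scan.npy', pts)                  # single raw array

assert np.array_equal(np.load('/tmp/scan.npz')['pc'], pts)  # must index by key
assert np.array_equal(np.load('/tmp/scan.npy'), pts)        # array comes back directly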