Commit ad08a792 authored by yinchimaoliang

use fstring, change gitignore and fix docstring

parent f27d308f
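
The diffs below apply one recurring pattern across the ScanNet and SUNRGBD data-preparation scripts: os.path is aliased to osp, and string concatenation / str.format calls are replaced with f-strings. A minimal before/after sketch of that pattern (the scan id and directory below are illustrative, not taken from the repository):

import os
import os.path as osp

scan_name = 'scene0000_00'          # illustrative scan id
scannet_dir = 'data/scannet/scans'  # illustrative directory

# before: full os.path prefix and string concatenation
meta_file_old = os.path.join(scannet_dir, scan_name, scan_name + '.txt')

# after: osp alias and an f-string, as used throughout this commit
meta_file_new = osp.join(scannet_dir, scan_name, f'{scan_name}.txt')

assert meta_file_old == meta_file_new  # both build the same path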
@@ -125,3 +125,6 @@ exps/
 # demo
 *.jpg
 *.png
+/data/scannet/scans/
+/data/sunrgbd/OFFICIAL_SUNRGBD/
+/data/sunrgbd/sunrgbd_trainval/
@@ -3,6 +3,6 @@ line_length = 79
 multi_line_output = 0
 known_standard_library = setuptools
 known_first_party = mmdet,mmdet3d
-known_third_party = cv2,mmcv,numba,numpy,nuscenes,plyfile,pycocotools,pyquaternion,pytest,scipy,shapely,six,skimage,terminaltables,torch,torchvision
+known_third_party = cv2,load_scannet_data,mmcv,numba,numpy,nuscenes,plyfile,pycocotools,pyquaternion,pytest,scannet_utils,scipy,shapely,six,skimage,sunrgbd_utils,terminaltables,torch,torchvision
 no_lines_before = STDLIB,LOCALFOLDER
 default_section = THIRDPARTY
-# Copyright (c) Facebook, Inc. and its affiliates.
+# Modified from Facebook, Inc. and its affiliates.
 #
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
@@ -10,6 +10,7 @@ Usage example: python ./batch_load_scannet_data.py
 import argparse
 import datetime
 import os
+import os.path as osp

 import numpy as np
 from load_scannet_data import export
@@ -22,15 +23,13 @@ OBJ_CLASS_IDS = np.array(
 def export_one_scan(scan_name, output_filename_prefix, max_num_point,
                     label_map_file, scannet_dir):
-    mesh_file = os.path.join(scannet_dir, scan_name,
-                             scan_name + '_vh_clean_2.ply')
-    agg_file = os.path.join(scannet_dir, scan_name,
-                            scan_name + '.aggregation.json')
-    seg_file = os.path.join(scannet_dir, scan_name,
-                            scan_name + '_vh_clean_2.0.010000.segs.json')
-    meta_file = os.path.join(
-        scannet_dir, scan_name, scan_name +
-        '.txt')  # includes axisAlignment info for the train set scans.
+    mesh_file = osp.join(scannet_dir, scan_name, scan_name + '_vh_clean_2.ply')
+    agg_file = osp.join(scannet_dir, scan_name,
+                        scan_name + '.aggregation.json')
+    seg_file = osp.join(scannet_dir, scan_name,
+                        scan_name + '_vh_clean_2.0.010000.segs.json')
+    # includes axisAlignment info for the train set scans.
+    meta_file = osp.join(scannet_dir, scan_name, f'{scan_name}.txt')
     mesh_vertices, semantic_labels, instance_labels, instance_bboxes, \
         instance2semantic = export(mesh_file, agg_file, seg_file,
                                    meta_file, label_map_file, None)

@@ -41,7 +40,7 @@ def export_one_scan(scan_name, output_filename_prefix, max_num_point,
     instance_labels = instance_labels[mask]

     num_instances = len(np.unique(instance_labels))
-    print('Num of instances: ', num_instances)
+    print(f'Num of instances: {num_instances}')

     bbox_mask = np.in1d(instance_bboxes[:, -1], OBJ_CLASS_IDS)
     instance_bboxes = instance_bboxes[bbox_mask, :]
@@ -54,16 +53,16 @@ def export_one_scan(scan_name, output_filename_prefix, max_num_point,
     semantic_labels = semantic_labels[choices]
     instance_labels = instance_labels[choices]

-    np.save(output_filename_prefix + '_vert.npy', mesh_vertices)
-    np.save(output_filename_prefix + '_sem_label.npy', semantic_labels)
-    np.save(output_filename_prefix + '_ins_label.npy', instance_labels)
-    np.save(output_filename_prefix + '_bbox.npy', instance_bboxes)
+    np.save(f'{output_filename_prefix}_vert.npy', mesh_vertices)
+    np.save(f'{output_filename_prefix}_sem_label.npy', semantic_labels)
+    np.save(f'{output_filename_prefix}_ins_label.npy', instance_labels)
+    np.save(f'{output_filename_prefix}_bbox.npy', instance_bboxes)


 def batch_export(max_num_point, output_folder, train_scan_names_file,
                  label_map_file, scannet_dir):
     if not os.path.exists(output_folder):
-        print('Creating new data folder: {}'.format(output_folder))
+        print(f'Creating new data folder: {output_folder}')
         os.mkdir(output_folder)

     train_scan_names = [line.rstrip() for line in open(train_scan_names_file)]
@@ -71,8 +70,8 @@ def batch_export(max_num_point, output_folder, train_scan_names_file,
         print('-' * 20 + 'begin')
         print(datetime.datetime.now())
         print(scan_name)
-        output_filename_prefix = os.path.join(output_folder, scan_name)
-        if os.path.isfile(output_filename_prefix + '_vert.npy'):
+        output_filename_prefix = osp.join(output_folder, scan_name)
+        if osp.isfile(f'{output_filename_prefix}_vert.npy'):
             print('File already exists. skipping.')
             print('-' * 20 + 'done')
             continue
...
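For context on the f-string changes above, here is a short sketch of how the per-scan prefix expands into the four .npy files that export_one_scan saves and that batch_export checks for; the output folder name and scan id are illustrative:

import os.path as osp

output_folder = './scannet_train_detection_data'  # illustrative output folder
scan_name = 'scene0000_00'                        # illustrative scan id

# prefix built inside the loop of batch_export
output_filename_prefix = osp.join(output_folder, scan_name)

# the four arrays written by export_one_scan for each scan
expected_files = [
    f'{output_filename_prefix}{suffix}' for suffix in
    ('_vert.npy', '_sem_label.npy', '_ins_label.npy', '_bbox.npy')
]
print(expected_files)

# the same f-string pattern drives the "already processed" check
already_done = osp.isfile(f'{output_filename_prefix}_vert.npy')
print(already_done)
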
@@ -58,11 +58,25 @@ def export(mesh_file,
            meta_file,
            label_map_file,
            output_file=None):
-    """ points are XYZ RGB (RGB in 0-255),
-    semantic label as nyu40 ids,
-    instance label as 1-#instance,
-    box as (cx,cy,cz,dx,dy,dz,semantic_label)
-    """
+    """Export original files to vert, ins_label, sem_label and bbox file.
+
+    Args:
+        mesh_file(str): Path of the mesh_file.
+        agg_file(str): Path of the agg_file.
+        seg_file(str): Path of the seg_file.
+        meta_file(str): Path of the meta_file.
+        label_map_file(str): Path of the label_map_file.
+        output_file(str): Path of the output folder.
+            Default: None.
+
+    Return:
+        ndarray: Vertices of points data.
+        ndarray: Indexes of label.
+        ndarray: Indexes of instance.
+        ndarray: Instance bboxes.
+        dict: Map from object_id to label_id.
+    """
     label_map = scannet_utils.read_label_mapping(
         label_map_file, label_from='raw_category', label_to='nyu40id')
     mesh_vertices = scannet_utils.read_mesh_vertices_rgb(mesh_file)
@@ -85,7 +99,7 @@ def export(mesh_file,
     # Load semantic and instance labels
     object_id_to_segs, label_to_segs = read_aggregation(agg_file)
     seg_to_verts, num_verts = read_segmentation(seg_file)
-    label_ids = np.zeros(shape=(num_verts), dtype=np.uint32)  # 0: unannotated
+    label_ids = np.zeros(shape=(num_verts), dtype=np.uint32)
     object_id_to_label_id = {}
     for label, segs in label_to_segs.items():
         label_id = label_map[label]
@@ -107,11 +121,6 @@ def export(mesh_file,
         obj_pc = mesh_vertices[instance_ids == obj_id, 0:3]
         if len(obj_pc) == 0:
             continue
-        # Compute axis aligned box
-        # An axis aligned bounding box is parameterized by
-        # (cx,cy,cz) and (dx,dy,dz) and label id
-        # where (cx,cy,cz) is the center point of the box,
-        # dx is the x-axis length of the box.
         xmin = np.min(obj_pc[:, 0])
         ymin = np.min(obj_pc[:, 1])
         zmin = np.min(obj_pc[:, 2])
...
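A hedged sketch of calling export with the arguments documented in the new docstring; the scene id and the label-map filename are illustrative placeholders, and in practice these paths are assembled per scan by batch_load_scannet_data.py:

from load_scannet_data import export

scan = 'scene0000_00'  # illustrative scene id
mesh_file = f'scans/{scan}/{scan}_vh_clean_2.ply'
agg_file = f'scans/{scan}/{scan}.aggregation.json'
seg_file = f'scans/{scan}/{scan}_vh_clean_2.0.010000.segs.json'
meta_file = f'scans/{scan}/{scan}.txt'  # axis alignment info
label_map_file = 'scannetv2-labels.combined.tsv'  # assumed label-map filename

# returns vertices, per-point semantic/instance labels, per-instance
# boxes and the object_id -> label_id map described in the docstring
mesh_vertices, semantic_labels, instance_labels, instance_bboxes, \
    object_id_to_label_id = export(mesh_file, agg_file, seg_file, meta_file,
                                   label_map_file, None)
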
@@ -41,7 +41,17 @@ def random_sampling(pc, num_sample, replace=None, return_choices=False):


 class sunrgbd_object(object):
-    ''' Load and parse object data '''
+    """Sunrgbd object.
+
+    Load and parse object data.
+
+    Args:
+        root_dir(str): Root directory.
+        split(str): Training or testing.
+            Default: 'training'.
+        use_v1(bool): Whether to use v1.
+            Default: False.
+    """

     def __init__(self, root_dir, split='training', use_v1=False):
         self.root_dir = root_dir
...
@@ -34,9 +34,16 @@ class2type = {type2class[t]: t for t in type2class}


 def flip_axis_to_camera(pc):
-    ''' Flip X-right,Y-forward,Z-up to X-right,Y-down,Z-forward
-    Input and output are both (N,3) array
-    '''
+    """Flip axis to camera.
+
+    Flip X-right,Y-forward,Z-up to X-right,Y-down,Z-forward.
+
+    Args:
+        pc(ndarray): points in depth axis.
+
+    Return:
+        ndarray: points in camera axis.
+    """
     pc2 = np.copy(pc)
     pc2[:, [0, 1, 2]] = pc2[:, [0, 2, 1]]  # cam X,Y,Z = depth X,-Z,Y
     pc2[:, 1] *= -1
...
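The docstring above describes a fixed coordinate flip; a tiny numeric check of the two transform lines shown in the hunk (with the return value the docstring describes added back) makes the mapping cam X,Y,Z = depth X,-Z,Y concrete:

import numpy as np

def flip_axis_to_camera(pc):
    # swap the Y and Z columns, then negate the new Y column
    pc2 = np.copy(pc)
    pc2[:, [0, 1, 2]] = pc2[:, [0, 2, 1]]  # cam X,Y,Z = depth X,-Z,Y
    pc2[:, 1] *= -1
    return pc2

# a single point in depth axes (X-right, Y-forward, Z-up) ...
depth_pt = np.array([[1.0, 2.0, 3.0]])
# ... lands at (1, -3, 2) in camera axes (X-right, Y-down, Z-forward)
print(flip_axis_to_camera(depth_pt))  # [[ 1. -3.  2.]]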