# Modified from
# https://github.com/facebookresearch/votenet/blob/master/scannet/batch_load_scannet_data.py

# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

"""Batch mode in loading Scannet scenes with vertices and ground truth labels
for semantic and instance segmentations.

Usage example: python ./batch_load_scannet_data.py
"""
import argparse
import datetime
import os
import os.path as osp

import numpy as np
from load_scannet_data import export

SCANNET_DIR = 'scans'
DONOTCARE_CLASS_IDS = np.array([])
OBJ_CLASS_IDS = np.array(
    [3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 14, 16, 24, 28, 33, 34, 36, 39])


def export_one_scan(scan_name, output_filename_prefix, max_num_point,
                    label_map_file, scannet_dir):
    """Export a single ScanNet scan to ``.npy`` files.

    Writes four files next to ``output_filename_prefix``: vertices,
    per-point semantic labels, per-point instance labels and instance
    bounding boxes.

    Args:
        scan_name (str): Name of the scan, e.g. ``scene0000_00``.
        output_filename_prefix (str): Path prefix of the output files.
        max_num_point (int): Maximum number of points kept per scan; larger
            scans are randomly subsampled to this size.
        label_map_file (str): Path of the scannetv2 label-mapping tsv file.
        scannet_dir (str): Directory containing the raw scan folders.
    """
    mesh_file = osp.join(scannet_dir, scan_name, scan_name + '_vh_clean_2.ply')
    agg_file = osp.join(scannet_dir, scan_name,
                        scan_name + '.aggregation.json')
    seg_file = osp.join(scannet_dir, scan_name,
                        scan_name + '_vh_clean_2.0.010000.segs.json')
    # includes axisAlignment info for the train set scans.
    meta_file = osp.join(scannet_dir, scan_name, f'{scan_name}.txt')
    mesh_vertices, semantic_labels, instance_labels, instance_bboxes, \
        instance2semantic = export(mesh_file, agg_file, seg_file,
                                   meta_file, label_map_file, None)

    # Drop points whose semantic label is in the don't-care set (the set is
    # empty by default, so the mask keeps everything).  np.isin is the
    # recommended replacement for the deprecated np.in1d.
    mask = np.logical_not(np.isin(semantic_labels, DONOTCARE_CLASS_IDS))
    mesh_vertices = mesh_vertices[mask, :]
    semantic_labels = semantic_labels[mask]
    instance_labels = instance_labels[mask]

    num_instances = len(np.unique(instance_labels))
    print(f'Num of instances: {num_instances}')

    # Keep only boxes whose class id (last column) is a cared object class.
    bbox_mask = np.isin(instance_bboxes[:, -1], OBJ_CLASS_IDS)
    instance_bboxes = instance_bboxes[bbox_mask, :]
    print('Num of care instances: ', instance_bboxes.shape[0])

    # Randomly subsample scans that exceed the point budget.
    N = mesh_vertices.shape[0]
    if N > max_num_point:
        choices = np.random.choice(N, max_num_point, replace=False)
        mesh_vertices = mesh_vertices[choices, :]
        semantic_labels = semantic_labels[choices]
        instance_labels = instance_labels[choices]

    np.save(f'{output_filename_prefix}_vert.npy', mesh_vertices)
    np.save(f'{output_filename_prefix}_sem_label.npy', semantic_labels)
    np.save(f'{output_filename_prefix}_ins_label.npy', instance_labels)
    np.save(f'{output_filename_prefix}_bbox.npy', instance_bboxes)

def batch_export(max_num_point, output_folder, train_scan_names_file,
                 label_map_file, scannet_dir):
    """Export every scan listed in ``train_scan_names_file``.

    Scans that already have a ``*_vert.npy`` output are skipped, and a
    failure in one scan does not stop the batch.

    Args:
        max_num_point (int): Maximum number of points kept per scan.
        output_folder (str): Folder the .npy files are written to; created
            (including missing parents) when it does not exist.
        train_scan_names_file (str): Text file with one scan name per line.
        label_map_file (str): Path of the scannetv2 label-mapping tsv file.
        scannet_dir (str): Directory containing the raw scan folders.
    """
    if not osp.exists(output_folder):
        print(f'Creating new data folder: {output_folder}')
        # makedirs also creates missing parent directories; os.mkdir would
        # fail for a nested output path.
        os.makedirs(output_folder)

    # Close the scan-name file deterministically instead of relying on GC.
    with open(train_scan_names_file) as f:
        train_scan_names = [line.rstrip() for line in f]
    for scan_name in train_scan_names:
        print('-' * 20 + 'begin')
        print(datetime.datetime.now())
        print(scan_name)
        output_filename_prefix = osp.join(output_folder, scan_name)
        # Skip scans already exported by an earlier (possibly aborted) run.
        if osp.isfile(f'{output_filename_prefix}_vert.npy'):
            print('File already exists. skipping.')
            print('-' * 20 + 'done')
            continue
        try:
            export_one_scan(scan_name, output_filename_prefix, max_num_point,
                            label_map_file, scannet_dir)
        except Exception:
            # Best-effort batch job: report the failure and continue.
            print('Failed export scan: %s' % (scan_name))
        print('-' * 20 + 'done')


def main():
    """Parse command-line arguments and launch the batch export."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--max_num_point',
        default=50000,
        # Without type=int a CLI-supplied value stays a str and the
        # `N > max_num_point` comparison downstream raises TypeError.
        type=int,
        help='The maximum number of the points.')
    parser.add_argument(
        '--output_folder',
        default='./scannet_train_instance_data',
        help='output folder of the result.')
    parser.add_argument(
        '--scannet_dir', default='scans', help='scannet data directory.')
    parser.add_argument(
        '--label_map_file',
        default='meta_data/scannetv2-labels.combined.tsv',
        help='The path of label map file.')
    parser.add_argument(
        '--train_scan_names_file',
        default='meta_data/scannet_train.txt',
        help='The path of the file that stores the scan names.')
    args = parser.parse_args()
    batch_export(args.max_num_point, args.output_folder,
                 args.train_scan_names_file, args.label_map_file,
                 args.scannet_dir)

# Entry point: run the batch export only when executed as a script.
if __name__ == '__main__':
    main()