"git@developer.sourcefind.cn:OpenDAS/ollama.git" did not exist on "c344da4c5a09521fce0c825dcef4df1c155b150d"
Unverified Commit f0e485bd authored by VVsssssk, committed by GitHub

[Fix] Fix data conversion (#1755)

* fix mmcv.model to mmengine.model

* fix dataset sweeps

* fix

* fix nus metric and CBGS

* add nus namemap

* fix create data

* fix comments

* fix tools

* add waymo

* remove test in kitti

* fix comments

* fix comments
parent 7d072d60
--- a/tools/create_data.py
+++ b/tools/create_data.py
@@ -1,6 +1,5 @@
 # Copyright (c) OpenMMLab. All rights reserved.
 import argparse
-import os
 from os import path as osp

 from tools.dataset_converters import indoor_converter as indoor
@@ -9,6 +8,7 @@ from tools.dataset_converters import lyft_converter as lyft_converter
 from tools.dataset_converters import nuscenes_converter as nuscenes_converter
 from tools.dataset_converters.create_gt_database import (
     GTDatabaseCreater, create_groundtruth_database)
+from tools.dataset_converters.update_infos_to_v2 import update_pkl_infos


 def kitti_data_prep(root_path,
@@ -32,21 +32,20 @@ def kitti_data_prep(root_path,
     kitti.create_kitti_info_file(root_path, info_prefix, with_plane)
     kitti.create_reduced_point_cloud(root_path, info_prefix)

-    info_train_path = osp.join(root_path, f'{info_prefix}_infos_train.pkl')
-    info_val_path = osp.join(root_path, f'{info_prefix}_infos_val.pkl')
-    info_trainval_path = osp.join(root_path,
-                                  f'{info_prefix}_infos_trainval.pkl')
-    info_test_path = osp.join(root_path, f'{info_prefix}_infos_test.pkl')
+    info_train_path = osp.join(out_dir, f'{info_prefix}_infos_train.pkl')
+    info_val_path = osp.join(out_dir, f'{info_prefix}_infos_val.pkl')
+    info_trainval_path = osp.join(out_dir, f'{info_prefix}_infos_trainval.pkl')
     kitti.export_2d_annotation(root_path, info_train_path)
     kitti.export_2d_annotation(root_path, info_val_path)
     kitti.export_2d_annotation(root_path, info_trainval_path)
-    kitti.export_2d_annotation(root_path, info_test_path)
+    update_pkl_infos('kitti', out_dir=out_dir, pkl_path=info_train_path)
+    update_pkl_infos('kitti', out_dir=out_dir, pkl_path=info_val_path)
+    update_pkl_infos('kitti', out_dir=out_dir, pkl_path=info_trainval_path)

     create_groundtruth_database(
         'KittiDataset',
         root_path,
         info_prefix,
-        f'{out_dir}/{info_prefix}_infos_train.pkl',
+        f'{info_prefix}_infos_train.pkl',
         relative_path=False,
         mask_anno_path='instances_train.json',
         with_mask=(version == 'mask'))
@@ -76,19 +75,22 @@ def nuscenes_data_prep(root_path,
         root_path, info_prefix, version=version, max_sweeps=max_sweeps)

     if version == 'v1.0-test':
-        info_test_path = osp.join(root_path, f'{info_prefix}_infos_test.pkl')
+        info_test_path = osp.join(out_dir, f'{info_prefix}_infos_test.pkl')
         nuscenes_converter.export_2d_annotation(
             root_path, info_test_path, version=version)
+        update_pkl_infos('nuscenes', out_dir=out_dir, pkl_path=info_test_path)
         return

-    info_train_path = osp.join(root_path, f'{info_prefix}_infos_train.pkl')
-    info_val_path = osp.join(root_path, f'{info_prefix}_infos_val.pkl')
+    info_train_path = osp.join(out_dir, f'{info_prefix}_infos_train.pkl')
+    info_val_path = osp.join(out_dir, f'{info_prefix}_infos_val.pkl')
     nuscenes_converter.export_2d_annotation(
         root_path, info_train_path, version=version)
     nuscenes_converter.export_2d_annotation(
         root_path, info_val_path, version=version)
+    update_pkl_infos('nuscenes', out_dir=out_dir, pkl_path=info_train_path)
+    update_pkl_infos('nuscenes', out_dir=out_dir, pkl_path=info_val_path)
     create_groundtruth_database(dataset_name, root_path, info_prefix,
-                                f'{out_dir}/{info_prefix}_infos_train.pkl')
+                                f'{info_prefix}_infos_train.pkl')


 def lyft_data_prep(root_path, info_prefix, version, max_sweeps=10):
@@ -107,6 +109,14 @@ def lyft_data_prep(root_path, info_prefix, version, max_sweeps=10):
     """
     lyft_converter.create_lyft_infos(
         root_path, info_prefix, version=version, max_sweeps=max_sweeps)
+    if version == 'v1.01-test':
+        info_test_path = osp.join(root_path, f'{info_prefix}_infos_test.pkl')
+        update_pkl_infos('lyft', out_dir=root_path, pkl_path=info_test_path)
+    elif version == 'v1.01-train':
+        info_train_path = osp.join(root_path, f'{info_prefix}_infos_train.pkl')
+        info_val_path = osp.join(root_path, f'{info_prefix}_infos_val.pkl')
+        update_pkl_infos('lyft', out_dir=root_path, pkl_path=info_train_path)
+        update_pkl_infos('lyft', out_dir=root_path, pkl_path=info_val_path)


 def scannet_data_prep(root_path, info_prefix, out_dir, workers):
@@ -120,6 +130,12 @@ def scannet_data_prep(root_path, info_prefix, out_dir, workers):
     """
     indoor.create_indoor_info_file(
         root_path, info_prefix, out_dir, workers=workers)
+    info_train_path = osp.join(out_dir, f'{info_prefix}_infos_train.pkl')
+    info_test_path = osp.join(out_dir, f'{info_prefix}_infos_test.pkl')
+    info_val_path = osp.join(out_dir, f'{info_prefix}_infos_val.pkl')
+    update_pkl_infos('scannet', out_dir=out_dir, pkl_path=info_train_path)
+    update_pkl_infos('scannet', out_dir=out_dir, pkl_path=info_test_path)
+    update_pkl_infos('scannet', out_dir=out_dir, pkl_path=info_val_path)


 def s3dis_data_prep(root_path, info_prefix, out_dir, workers):
@@ -133,6 +149,10 @@ def s3dis_data_prep(root_path, info_prefix, out_dir, workers):
     """
     indoor.create_indoor_info_file(
         root_path, info_prefix, out_dir, workers=workers)
+    splits = [f'Area_{i}' for i in [1, 2, 3, 4, 5, 6]]
+    for split in splits:
+        filename = osp.join(out_dir, f'{info_prefix}_infos_{split}.pkl')
+        update_pkl_infos('s3dis', out_dir=out_dir, pkl_path=filename)


 def sunrgbd_data_prep(root_path, info_prefix, out_dir, workers):
@@ -146,6 +166,10 @@ def sunrgbd_data_prep(root_path, info_prefix, out_dir, workers):
     """
     indoor.create_indoor_info_file(
         root_path, info_prefix, out_dir, workers=workers)
+    info_train_path = osp.join(out_dir, f'{info_prefix}_infos_train.pkl')
+    info_val_path = osp.join(out_dir, f'{info_prefix}_infos_val.pkl')
+    update_pkl_infos('sunrgbd', out_dir=out_dir, pkl_path=info_train_path)
+    update_pkl_infos('sunrgbd', out_dir=out_dir, pkl_path=info_val_path)


 def waymo_data_prep(root_path,
@@ -185,6 +209,12 @@ def waymo_data_prep(root_path,
     out_dir = osp.join(out_dir, 'kitti_format')
     kitti.create_waymo_info_file(
         out_dir, info_prefix, max_sweeps=max_sweeps, workers=workers)
+    info_train_path = osp.join(out_dir, f'{info_prefix}_infos_train.pkl')
+    info_val_path = osp.join(out_dir, f'{info_prefix}_infos_val.pkl')
+    info_trainval_path = osp.join(out_dir, f'{info_prefix}_infos_trainval.pkl')
+    update_pkl_infos('kitti', out_dir=out_dir, pkl_path=info_train_path)
+    update_pkl_infos('kitti', out_dir=out_dir, pkl_path=info_val_path)
+    update_pkl_infos('kitti', out_dir=out_dir, pkl_path=info_trainval_path)
     GTDatabaseCreater(
         'WaymoDataset',
         out_dir,
@@ -230,6 +260,8 @@ parser.add_argument(
 args = parser.parse_args()

 if __name__ == '__main__':
+    from mmdet3d.utils import register_all_modules
+    register_all_modules()
     if args.dataset == 'kitti':
         kitti_data_prep(
             root_path=args.root_path,
@@ -304,12 +336,3 @@ if __name__ == '__main__':
             workers=args.workers)
     else:
         raise NotImplementedError(f'Don\'t support {args.dataset} dataset.')
-
-    for file_name in os.listdir(args.out_dir):
-        if '_infos_' in file_name and '.pkl' in file_name:
-            cmd = f'python tools/dataset_converters/update_infos_to_v2.py ' \
-                  f'--dataset {args.dataset} ' \
-                  f'--pkl {osp.join(args.out_dir,file_name)} ' \
-                  f'--out-dir {args.out_dir}'
-            print(cmd)
-            os.system(cmd)
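With these changes, create_data.py no longer shells out to update_infos_to_v2.py after all converters finish; each *_data_prep function upgrades the info .pkl files it just wrote by calling update_pkl_infos in-process, with the registries populated up front via register_all_modules. A minimal sketch of the resulting flow for one dataset; the function names come from the diff above, while the paths and prefix are illustrative assumptions:

from os import path as osp

from mmdet3d.utils import register_all_modules
from tools.dataset_converters.update_infos_to_v2 import update_pkl_infos

register_all_modules()  # fill the registries before any dataset is built

out_dir = './data/kitti'   # assumed value of --out-dir
info_prefix = 'kitti'      # assumed value of --extra-tag
for split in ('train', 'val', 'trainval'):
    # upgrade each freshly written info file to the v2 format
    pkl_path = osp.join(out_dir, f'{info_prefix}_infos_{split}.pkl')
    update_pkl_infos('kitti', out_dir=out_dir, pkl_path=pkl_path)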
--- a/tools/dataset_converters/create_gt_database.py
+++ b/tools/dataset_converters/create_gt_database.py
@@ -149,14 +149,12 @@ def create_groundtruth_database(dataset_class_name,
     if dataset_class_name == 'KittiDataset':
         file_client_args = dict(backend='disk')
         dataset_cfg.update(
-            test_mode=False,
-            split='training',
             modality=dict(
                 use_lidar=True,
-                use_depth=False,
-                use_lidar_intensity=True,
                 use_camera=with_mask,
             ),
+            data_prefix=dict(
+                pts='training/velodyne_reduced', img='training/image_2'),
             pipeline=[
                 dict(
                     type='LoadPointsFromFile',
@@ -174,6 +172,8 @@ def create_groundtruth_database(dataset_class_name,
     elif dataset_class_name == 'NuScenesDataset':
         dataset_cfg.update(
             use_valid_flag=True,
+            data_prefix=dict(
+                pts='samples/LIDAR_TOP', img='', sweeps='sweeps/LIDAR_TOP'),
             pipeline=[
                 dict(
                     type='LoadPointsFromFile',
@@ -236,14 +236,13 @@ def create_groundtruth_database(dataset_class_name,
     group_counter = 0
     for j in track_iter_progress(list(range(len(dataset)))):
-        input_dict = dataset.get_data_info(j)
-        dataset.pre_pipeline(input_dict)
-        example = dataset.pipeline(input_dict)
+        data_info = dataset.get_data_info(j)
+        example = dataset.pipeline(data_info)
         annos = example['ann_info']
         image_idx = example['sample_idx']
         points = example['points'].tensor.numpy()
         gt_boxes_3d = annos['gt_bboxes_3d'].tensor.numpy()
-        names = annos['gt_names']
+        names = [dataset.metainfo['CLASSES'][i] for i in annos['gt_labels_3d']]
         group_dict = dict()
         if 'group_ids' in annos:
             group_ids = annos['group_ids']
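This hunk tracks the mmengine-style dataset API: the output of get_data_info is fed straight into the pipeline (the old pre_pipeline step no longer exists), and class names are recovered from integer labels through the dataset's metainfo rather than a gt_names annotation field. A toy illustration of that lookup; the class tuple below is an invented example, not the real metainfo of any dataset:

# Invented classes, purely to show the index-to-name mapping above.
metainfo = {'CLASSES': ('Pedestrian', 'Cyclist', 'Car')}
gt_labels_3d = [2, 0, 2]  # integer labels produced by the data pipeline
names = [metainfo['CLASSES'][i] for i in gt_labels_3d]
assert names == ['Car', 'Pedestrian', 'Car']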
--- a/tools/dataset_converters/update_infos_to_v2.py
+++ b/tools/dataset_converters/update_infos_to_v2.py
@@ -152,8 +152,8 @@ def get_single_lidar_sweep():
     return single_lidar_sweep


-def get_empty_standard_data_info(camera_types=['CAM1', 'CAM2', 'CAM3',
-                                               'CAM4']):
+def get_empty_standard_data_info(
+        camera_types=['CAM0', 'CAM1', 'CAM2', 'CAM3', 'CAM4']):

     data_info = dict(
         # (str): Sample id of the frame.
@@ -1031,28 +1031,28 @@ def parse_args():
     return args


-def main():
-    args = parse_args()
-    if args.out_dir is None:
-        args.out_dir = args.root_dir
-    if args.dataset.lower() == 'kitti':
-        update_kitti_infos(pkl_path=args.pkl, out_dir=args.out_dir)
-    elif args.dataset.lower() == 'waymo':
-        update_waymo_infos(pkl_path=args.pkl, out_dir=args.out_dir)
-    elif args.dataset.lower() == 'scannet':
-        update_scannet_infos(pkl_path=args.pkl, out_dir=args.out_dir)
-    elif args.dataset.lower() == 'sunrgbd':
-        update_sunrgbd_infos(pkl_path=args.pkl, out_dir=args.out_dir)
-    elif args.dataset.lower() == 'lyft':
-        update_lyft_infos(pkl_path=args.pkl, out_dir=args.out_dir)
-    elif args.dataset.lower() == 'nuscenes':
-        update_nuscenes_infos(pkl_path=args.pkl, out_dir=args.out_dir)
-    elif args.dataset.lower() == 's3dis':
-        update_s3dis_infos(pkl_path=args.pkl, out_dir=args.out_dir)
+def update_pkl_infos(dataset, out_dir, pkl_path):
+    if dataset.lower() == 'kitti':
+        update_kitti_infos(pkl_path=pkl_path, out_dir=out_dir)
+    elif dataset.lower() == 'waymo':
+        update_waymo_infos(pkl_path=pkl_path, out_dir=out_dir)
+    elif dataset.lower() == 'scannet':
+        update_scannet_infos(pkl_path=pkl_path, out_dir=out_dir)
+    elif dataset.lower() == 'sunrgbd':
+        update_sunrgbd_infos(pkl_path=pkl_path, out_dir=out_dir)
+    elif dataset.lower() == 'lyft':
+        update_lyft_infos(pkl_path=pkl_path, out_dir=out_dir)
+    elif dataset.lower() == 'nuscenes':
+        update_nuscenes_infos(pkl_path=pkl_path, out_dir=out_dir)
+    elif dataset.lower() == 's3dis':
+        update_s3dis_infos(pkl_path=pkl_path, out_dir=out_dir)
     else:
-        raise NotImplementedError(
-            f'Do not support convert {args.dataset} to v2.')
+        raise NotImplementedError(f'Do not support convert {dataset} to v2.')


 if __name__ == '__main__':
-    main()
+    args = parse_args()
+    if args.out_dir is None:
+        args.out_dir = args.root_dir
+    update_pkl_infos(
+        dataset=args.dataset, out_dir=args.out_dir, pkl_path=args.pkl_path)
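Replacing main() with a plain update_pkl_infos function is what allows create_data.py to import the converter instead of spawning it through os.system. A hedged usage sketch; the dataset name and paths are illustrative, and nothing here beyond the function itself is shown in this diff:

# Programmatic use: exactly the function defined above, with made-up paths.
from tools.dataset_converters.update_infos_to_v2 import update_pkl_infos

update_pkl_infos(
    dataset='scannet',
    out_dir='./data/scannet',
    pkl_path='./data/scannet/scannet_infos_val.pkl')

The standalone script still works as well, presumably invoked as python tools/dataset_converters/update_infos_to_v2.py --dataset scannet --pkl-path ... --out-dir ... (flag names inferred from args.pkl_path and args.out_dir; parse_args is not part of this diff).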