Commit 7a6deaef authored by ChaimZhu's avatar ChaimZhu Committed by ZwwWayne
Browse files

[Refactor] rename `CLASSES` and `PALETTE` to `classes` and `palette` in dataset metainfo (#1932)

* rename CLASSES and PALETTE to classes and palette

* change mmcv-full to mmcv

* fix comments
parent 48ab8e2d
......@@ -136,7 +136,7 @@ class TestScanNetDataset(unittest.TestCase):
ann_file,
data_prefix=data_prefix,
pipeline=pipeline,
metainfo=dict(CLASSES=classes),
metainfo=dict(classes=classes),
modality=modality)
scannet_dataset.prepare_data(0)
......@@ -163,7 +163,7 @@ class TestScanNetDataset(unittest.TestCase):
torch.tensor([107.7353]))
no_class_scannet_dataset = ScanNetDataset(
data_root, ann_file, metainfo=dict(CLASSES=['cabinet']))
data_root, ann_file, metainfo=dict(classes=['cabinet']))
input_dict = no_class_scannet_dataset.get_data_info(0)
ann_info = no_class_scannet_dataset.parse_ann_info(input_dict)
......@@ -175,7 +175,7 @@ class TestScanNetDataset(unittest.TestCase):
self.assertEqual(ann_info['gt_labels_3d'].dtype, np.int64)
# all instance have been filtered by classes
self.assertEqual(len(ann_info['gt_labels_3d']), 27)
self.assertEqual(len(no_class_scannet_dataset.metainfo['CLASSES']), 1)
self.assertEqual(len(no_class_scannet_dataset.metainfo['classes']), 1)
def test_scannet_seg(self):
data_root, ann_file, classes, palette, scene_idxs, data_prefix, \
......@@ -186,7 +186,7 @@ class TestScanNetDataset(unittest.TestCase):
scannet_seg_dataset = ScanNetSegDataset(
data_root,
ann_file,
metainfo=dict(CLASSES=classes, PALETTE=palette),
metainfo=dict(classes=classes, palette=palette),
data_prefix=data_prefix,
pipeline=pipeline,
modality=modality,
......
......@@ -72,7 +72,7 @@ class TestSemanticKITTIDataset(unittest.TestCase):
semantickitti_dataset = SemanticKITTIDataset(
data_root,
ann_file,
metainfo=dict(CLASSES=classes, PALETTE=palette),
metainfo=dict(classes=classes, palette=palette),
data_prefix=data_prefix,
pipeline=pipeline,
modality=modality)
......
......@@ -48,7 +48,7 @@ class TestScanNetDataset(unittest.TestCase):
ann_file,
data_prefix=data_prefix,
pipeline=pipeline,
metainfo=dict(CLASSES=classes),
metainfo=dict(classes=classes),
modality=modality)
scannet_dataset.prepare_data(0)
......@@ -81,7 +81,7 @@ class TestScanNetDataset(unittest.TestCase):
ann_file,
data_prefix=data_prefix,
pipeline=pipeline,
metainfo=dict(CLASSES=classes),
metainfo=dict(classes=classes),
modality=modality)
input_dict = bed_scannet_dataset.get_data_info(0)
......@@ -94,4 +94,4 @@ class TestScanNetDataset(unittest.TestCase):
assert ann_info['gt_labels_3d'].dtype == np.int64
# all instance have been filtered by classes
self.assertEqual(len(ann_info['gt_labels_3d']), 3)
self.assertEqual(len(bed_scannet_dataset.metainfo['CLASSES']), 1)
self.assertEqual(len(bed_scannet_dataset.metainfo['classes']), 1)
......@@ -49,7 +49,7 @@ class TestIndoorMetric(unittest.TestCase):
pred_dict['eval_ann_info'] = eval_ann_info
indoor_metric.dataset_meta = {
'CLASSES': ('cabinet', 'bed', 'chair', 'sofa', 'table', 'door',
'classes': ('cabinet', 'bed', 'chair', 'sofa', 'table', 'door',
'window', 'bookshelf', 'picture', 'counter', 'desk',
'curtain', 'refrigerator', 'showercurtrain', 'toilet',
'sink', 'bathtub', 'garbagebin'),
......
......@@ -67,7 +67,7 @@ class TestInstanceSegMetric(unittest.TestCase):
'curtain', 'refrigerator', 'showercurtrain', 'toilet',
'sink', 'bathtub', 'garbagebin')
dataset_meta = dict(
seg_valid_class_ids=seg_valid_class_ids, CLASSES=class_labels)
seg_valid_class_ids=seg_valid_class_ids, classes=class_labels)
instance_seg_metric = InstanceSegMetric()
instance_seg_metric.dataset_meta = dataset_meta
instance_seg_metric.process(data_batch, predictions)
......
......@@ -53,7 +53,7 @@ def test_multi_modal_kitti_metric():
pytest.skip('test requires GPU and torch+cuda')
kittimetric = KittiMetric(
data_root + '/kitti_infos_train.pkl', metric=['mAP'])
kittimetric.dataset_meta = dict(CLASSES=['Pedestrian', 'Cyclist', 'Car'])
kittimetric.dataset_meta = dict(classes=['Pedestrian', 'Cyclist', 'Car'])
data_batch, predictions = _init_multi_modal_evaluate_input()
kittimetric.process(data_batch, predictions)
ap_dict = kittimetric.compute_metrics(kittimetric.results)
......@@ -76,7 +76,7 @@ def test_kitti_metric_mAP():
pytest.skip('test requires GPU and torch+cuda')
kittimetric = KittiMetric(
data_root + '/kitti_infos_train.pkl', metric=['mAP'])
kittimetric.dataset_meta = dict(CLASSES=['Pedestrian', 'Cyclist', 'Car'])
kittimetric.dataset_meta = dict(classes=['Pedestrian', 'Cyclist', 'Car'])
data_batch, predictions = _init_evaluate_input()
kittimetric.process(data_batch, predictions)
ap_dict = kittimetric.compute_metrics(kittimetric.results)
......
......@@ -243,7 +243,7 @@ def create_groundtruth_database(dataset_class_name,
image_idx = example['sample_idx']
points = example['points'].tensor.numpy()
gt_boxes_3d = annos['gt_bboxes_3d'].tensor.numpy()
names = [dataset.metainfo['CLASSES'][i] for i in annos['gt_labels_3d']]
names = [dataset.metainfo['classes'][i] for i in annos['gt_labels_3d']]
group_dict = dict()
if 'group_ids' in annos:
group_ids = annos['group_ids']
......@@ -409,7 +409,7 @@ class GTDatabaseCreater:
points = example['points'].tensor.numpy()
gt_boxes_3d = annos['gt_bboxes_3d'].tensor.numpy()
names = [
self.dataset.metainfo['CLASSES'][i] for i in annos['gt_labels_3d']
self.dataset.metainfo['classes'][i] for i in annos['gt_labels_3d']
]
group_dict = dict()
if 'group_ids' in annos:
......
......@@ -261,7 +261,7 @@ def update_nuscenes_infos(pkl_path, out_dir):
print(f'Reading from input file: {pkl_path}.')
data_list = mmengine.load(pkl_path)
METAINFO = {
'CLASSES':
'classes':
('car', 'truck', 'trailer', 'bus', 'construction_vehicle', 'bicycle',
'motorcycle', 'pedestrian', 'traffic_cone', 'barrier'),
}
......@@ -340,8 +340,8 @@ def update_nuscenes_infos(pkl_path, out_dir):
empty_instance = get_empty_instance()
empty_instance['bbox_3d'] = ori_info_dict['gt_boxes'][
i, :].tolist()
if ori_info_dict['gt_names'][i] in METAINFO['CLASSES']:
empty_instance['bbox_label'] = METAINFO['CLASSES'].index(
if ori_info_dict['gt_names'][i] in METAINFO['classes']:
empty_instance['bbox_label'] = METAINFO['classes'].index(
ori_info_dict['gt_names'][i])
else:
ignore_class_name.add(ori_info_dict['gt_names'][i])
......@@ -386,7 +386,7 @@ def update_kitti_infos(pkl_path, out_dir):
# TODO update to full label
# TODO discuss how to process 'Van', 'DontCare'
METAINFO = {
'CLASSES': ('Pedestrian', 'Cyclist', 'Car', 'Van', 'Truck',
'classes': ('Pedestrian', 'Cyclist', 'Car', 'Van', 'Truck',
'Person_sitting', 'Tram', 'Misc'),
}
print(f'Reading from input file: {pkl_path}.')
......@@ -451,8 +451,8 @@ def update_kitti_infos(pkl_path, out_dir):
empty_instance = get_empty_instance()
empty_instance['bbox'] = anns['bbox'][instance_id].tolist()
if anns['name'][instance_id] in METAINFO['CLASSES']:
empty_instance['bbox_label'] = METAINFO['CLASSES'].index(
if anns['name'][instance_id] in METAINFO['classes']:
empty_instance['bbox_label'] = METAINFO['classes'].index(
anns['name'][instance_id])
else:
ignore_class_name.add(anns['name'][instance_id])
......@@ -522,7 +522,7 @@ def update_s3dis_infos(pkl_path, out_dir):
print(f'Warning, you may overwriting '
f'the original data {pkl_path}.')
time.sleep(5)
METAINFO = {'CLASSES': ('table', 'chair', 'sofa', 'bookcase', 'board')}
METAINFO = {'classes': ('table', 'chair', 'sofa', 'bookcase', 'board')}
print(f'Reading from input file: {pkl_path}.')
data_list = mmengine.load(pkl_path)
print('Start updating:')
......@@ -554,12 +554,12 @@ def update_s3dis_infos(pkl_path, out_dir):
empty_instance['bbox_3d'] = anns['gt_boxes_upright_depth'][
instance_id].tolist()
if anns['class'][instance_id] < len(METAINFO['CLASSES']):
if anns['class'][instance_id] < len(METAINFO['classes']):
empty_instance['bbox_label_3d'] = anns['class'][
instance_id]
else:
ignore_class_name.add(
METAINFO['CLASSES'][anns['class'][instance_id]])
METAINFO['classes'][anns['class'][instance_id]])
empty_instance['bbox_label_3d'] = -1
empty_instance = clear_instance_unused_keys(empty_instance)
......@@ -593,7 +593,7 @@ def update_scannet_infos(pkl_path, out_dir):
f'the original data {pkl_path}.')
time.sleep(5)
METAINFO = {
'CLASSES':
'classes':
('cabinet', 'bed', 'chair', 'sofa', 'table', 'door', 'window',
'bookshelf', 'picture', 'counter', 'desk', 'curtain', 'refrigerator',
'showercurtrain', 'toilet', 'sink', 'bathtub', 'garbagebin')
......@@ -629,9 +629,9 @@ def update_scannet_infos(pkl_path, out_dir):
empty_instance['bbox_3d'] = anns['gt_boxes_upright_depth'][
instance_id].tolist()
if anns['name'][instance_id] in METAINFO['CLASSES']:
if anns['name'][instance_id] in METAINFO['classes']:
empty_instance['bbox_label_3d'] = METAINFO[
'CLASSES'].index(anns['name'][instance_id])
'classes'].index(anns['name'][instance_id])
else:
ignore_class_name.add(anns['name'][instance_id])
empty_instance['bbox_label_3d'] = -1
......@@ -667,7 +667,7 @@ def update_sunrgbd_infos(pkl_path, out_dir):
f'the original data {pkl_path}.')
time.sleep(5)
METAINFO = {
'CLASSES': ('bed', 'table', 'sofa', 'chair', 'toilet', 'desk',
'classes': ('bed', 'table', 'sofa', 'chair', 'toilet', 'desk',
'dresser', 'night_stand', 'bookshelf', 'bathtub')
}
print(f'Reading from input file: {pkl_path}.')
......@@ -705,9 +705,9 @@ def update_sunrgbd_infos(pkl_path, out_dir):
empty_instance['bbox_3d'] = anns['gt_boxes_upright_depth'][
instance_id].tolist()
empty_instance['bbox'] = anns['bbox'][instance_id].tolist()
if anns['name'][instance_id] in METAINFO['CLASSES']:
if anns['name'][instance_id] in METAINFO['classes']:
empty_instance['bbox_label_3d'] = METAINFO[
'CLASSES'].index(anns['name'][instance_id])
'classes'].index(anns['name'][instance_id])
empty_instance['bbox_label'] = empty_instance[
'bbox_label_3d']
else:
......@@ -746,7 +746,7 @@ def update_lyft_infos(pkl_path, out_dir):
print(f'Reading from input file: {pkl_path}.')
data_list = mmengine.load(pkl_path)
METAINFO = {
'CLASSES':
'classes':
('car', 'truck', 'bus', 'emergency_vehicle', 'other_vehicle',
'motorcycle', 'bicycle', 'pedestrian', 'animal'),
}
......@@ -817,8 +817,8 @@ def update_lyft_infos(pkl_path, out_dir):
empty_instance = get_empty_instance()
empty_instance['bbox_3d'] = ori_info_dict['gt_boxes'][
i, :].tolist()
if ori_info_dict['gt_names'][i] in METAINFO['CLASSES']:
empty_instance['bbox_label'] = METAINFO['CLASSES'].index(
if ori_info_dict['gt_names'][i] in METAINFO['classes']:
empty_instance['bbox_label'] = METAINFO['classes'].index(
ori_info_dict['gt_names'][i])
else:
ignore_class_name.add(ori_info_dict['gt_names'][i])
......@@ -865,7 +865,7 @@ def update_waymo_infos(pkl_path, out_dir):
# TODO update to full label
# TODO discuss how to process 'Van', 'DontCare'
METAINFO = {
'CLASSES': ('Car', 'Pedestrian', 'Cyclist', 'Sign'),
'classes': ('Car', 'Pedestrian', 'Cyclist', 'Sign'),
}
print(f'Reading from input file: {pkl_path}.')
data_list = mmengine.load(pkl_path)
......@@ -955,8 +955,8 @@ def update_waymo_infos(pkl_path, out_dir):
empty_instance = get_empty_instance()
empty_instance['bbox'] = anns['bbox'][instance_id].tolist()
if anns['name'][instance_id] in METAINFO['CLASSES']:
empty_instance['bbox_label'] = METAINFO['CLASSES'].index(
if anns['name'][instance_id] in METAINFO['classes']:
empty_instance['bbox_label'] = METAINFO['classes'].index(
anns['name'][instance_id])
else:
ignore_class_name.add(anns['name'][instance_id])
......@@ -999,8 +999,8 @@ def update_waymo_infos(pkl_path, out_dir):
empty_instance = get_empty_instance()
empty_instance['bbox'] = anns['bbox'][instance_id].tolist()
if anns['name'][instance_id] in METAINFO['CLASSES']:
empty_instance['bbox_label'] = METAINFO['CLASSES'].index(
if anns['name'][instance_id] in METAINFO['classes']:
empty_instance['bbox_label'] = METAINFO['classes'].index(
anns['name'][instance_id])
else:
ignore_class_name.add(anns['name'][instance_id])
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment