Unverified commit a481f5a8, authored by xiliu8006, committed by GitHub

[Enhance] Move train_cfg test_cfg to model (#307)

* Move train_cfg/test_cfg to model

* Move train_cfg/test_cfg to model

* Move train_cfg/test_cfg to model

* Move train_cfg/test_cfg to model

* Move train_cfg/test_cfg to model

* Move train_cfg/test_cfg to model

* Move train_cfg/test_cfg to model

* Move train_cfg and test_cfg into model

* modify centerpoint configs

* Modify docs

* modify build_detector

* modify test_config_build_detector

* modify build_detector parameters

* Adopt the same strategy in build_detector
parent a347ac75
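Every config touched by this commit follows the same pattern: the standalone top-level train_cfg and test_cfg dictionaries become keys of the model dict. A minimal sketch of the pattern, assuming a VoteNet-style config; the field names and values are taken from the VoteNet hunk below, and the model body is trimmed to the type key only:

# Before this change: train_cfg/test_cfg were standalone top-level dicts.
model = dict(type='VoteNet')
train_cfg = dict(pos_distance_thr=0.3, sample_mod='vote')
test_cfg = dict(nms_thr=0.25, score_thr=0.05)

# After this change: both blocks are nested inside the model dict.
model = dict(
    type='VoteNet',
    # model training and testing settings
    train_cfg=dict(pos_distance_thr=0.3, sample_mod='vote'),
    test_cfg=dict(nms_thr=0.25, score_thr=0.05))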
@@ -65,17 +65,16 @@ model = dict(
             type='SmoothL1Loss', reduction='sum', loss_weight=1.0),
         corner_loss=dict(
             type='SmoothL1Loss', reduction='sum', loss_weight=1.0),
-        vote_loss=dict(type='SmoothL1Loss', reduction='sum', loss_weight=1.0)))
-
-# model training and testing settings
-train_cfg = dict(
-    sample_mod='spec', pos_distance_thr=10.0, expand_dims_length=0.05)
-test_cfg = dict(
+        vote_loss=dict(type='SmoothL1Loss', reduction='sum', loss_weight=1.0)),
+    # model training and testing settings
+    train_cfg=dict(
+        sample_mod='spec', pos_distance_thr=10.0, expand_dims_length=0.05),
+    test_cfg=dict(
     nms_cfg=dict(type='nms', iou_thr=0.1),
     sample_mod='spec',
     score_thr=0.0,
     per_class_proposal=True,
-    max_output_num=100)
+    max_output_num=100))
 # optimizer
 # This schedule is mainly used by models on indoor dataset,
@@ -105,9 +105,9 @@ model = dict(
             conv_out_channels=256,
             num_classes=80,
             loss_mask=dict(
-                type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))))
-# model training and testing settings
-train_cfg = dict(
+                type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))),
+    # model training and testing settings
+    train_cfg=dict(
         rpn=dict(
             assigner=dict(
                 type='MaxIoUAssigner',
@@ -184,8 +184,8 @@ train_cfg = dict(
             mask_size=28,
             pos_weight=-1,
             debug=False)
-    ])
-test_cfg = dict(
+    ]),
+    test_cfg=dict(
     rpn=dict(
         nms_across_levels=False,
         nms_pre=1000,
@@ -197,4 +197,4 @@ test_cfg = dict(
         score_thr=0.05,
         nms=dict(type='nms', iou_threshold=0.5),
         max_per_img=100,
-        mask_thr_binary=0.5))
+        mask_thr_binary=0.5)))
@@ -56,9 +56,9 @@ model = dict(
             type='SeparateHead', init_bias=-2.19, final_kernel=3),
         loss_cls=dict(type='GaussianFocalLoss', reduction='mean'),
         loss_bbox=dict(type='L1Loss', reduction='mean', loss_weight=0.25),
-        norm_bbox=True))
-# model training and testing settings
-train_cfg = dict(
+        norm_bbox=True),
+    # model training and testing settings
+    train_cfg=dict(
         pts=dict(
             grid_size=[1024, 1024, 40],
             voxel_size=voxel_size,
@@ -67,8 +67,8 @@ train_cfg = dict(
             gaussian_overlap=0.1,
             max_objs=500,
             min_radius=2,
-            code_weights=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.2, 0.2]))
-test_cfg = dict(
+            code_weights=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.2, 0.2])),
+    test_cfg=dict(
         pts=dict(
             post_center_limit_range=[-61.2, -61.2, -10.0, 61.2, 61.2, 10.0],
             max_per_img=500,
@@ -80,4 +80,4 @@ test_cfg = dict(
             nms_type='rotate',
             pre_max_size=1000,
             post_max_size=83,
-            nms_thr=0.2))
+            nms_thr=0.2)))
@@ -55,9 +55,9 @@ model = dict(
             type='SeparateHead', init_bias=-2.19, final_kernel=3),
         loss_cls=dict(type='GaussianFocalLoss', reduction='mean'),
         loss_bbox=dict(type='L1Loss', reduction='mean', loss_weight=0.25),
-        norm_bbox=True))
-# model training and testing settings
-train_cfg = dict(
+        norm_bbox=True),
+    # model training and testing settings
+    train_cfg=dict(
         pts=dict(
             grid_size=[512, 512, 1],
             voxel_size=voxel_size,
@@ -66,8 +66,8 @@ train_cfg = dict(
             gaussian_overlap=0.1,
             max_objs=500,
             min_radius=2,
-            code_weights=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.2, 0.2]))
-test_cfg = dict(
+            code_weights=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.2, 0.2])),
+    test_cfg=dict(
         pts=dict(
             post_center_limit_range=[-61.2, -61.2, -10.0, 61.2, 61.2, 10.0],
             max_per_img=500,
@@ -80,4 +80,4 @@ test_cfg = dict(
             nms_type='rotate',
             pre_max_size=1000,
             post_max_size=83,
-            nms_thr=0.2))
+            nms_thr=0.2)))
@@ -311,11 +311,11 @@ model = dict(
                 reduction='none',
                 loss_weight=5.0),
             primitive_center_loss=dict(
-                type='MSELoss', reduction='none', loss_weight=1.0))))
-
-# model training and testing settings
-train_cfg = dict(
-    rpn=dict(pos_distance_thr=0.3, neg_distance_thr=0.6, sample_mod='vote'),
+                type='MSELoss', reduction='none', loss_weight=1.0))),
+    # model training and testing settings
+    train_cfg=dict(
+        rpn=dict(
+            pos_distance_thr=0.3, neg_distance_thr=0.6, sample_mod='vote'),
     rpn_proposal=dict(use_nms=False),
     rcnn=dict(
         pos_distance_thr=0.3,
@@ -326,9 +326,8 @@ train_cfg = dict(
         mask_surface_threshold=0.3,
         label_surface_threshold=0.3,
         mask_line_threshold=0.3,
-        label_line_threshold=0.3))
-
-test_cfg = dict(
+        label_line_threshold=0.3)),
+    test_cfg=dict(
     rpn=dict(
         sample_mod='seed',
         nms_thr=0.25,
@@ -339,4 +338,4 @@ test_cfg = dict(
         sample_mod='seed',
         nms_thr=0.25,
         score_thr=0.05,
-        per_class_proposal=True))
+        per_class_proposal=True)))
@@ -17,6 +17,6 @@ model = dict(
         num_classes=9,
         anchor_generator=dict(
             ranges=[[-80, -80, -1.8, 80, 80, -1.8]], custom_values=[]),
-        bbox_coder=dict(type='DeltaXYZWLHRBBoxCoder', code_size=7)))
-# model training settings (based on nuScenes model settings)
-train_cfg = dict(pts=dict(code_weight=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]))
+        bbox_coder=dict(type='DeltaXYZWLHRBBoxCoder', code_size=7)),
+    # model training settings (based on nuScenes model settings)
+    train_cfg=dict(pts=dict(code_weight=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0])))
@@ -70,9 +70,9 @@ model = dict(
             loss_weight=1.0),
         loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0),
         loss_dir=dict(
-            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2)))
-# model training and testing settings
-train_cfg = dict(
+            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2)),
+    # model training and testing settings
+    train_cfg=dict(
     pts=dict(
         assigner=dict(
             type='MaxIoUAssigner',
@@ -84,8 +84,8 @@ train_cfg = dict(
         allowed_border=0,
         code_weight=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.2, 0.2],
         pos_weight=-1,
-        debug=False))
-test_cfg = dict(
+        debug=False)),
+    test_cfg=dict(
     pts=dict(
         use_rotate_nms=True,
         nms_across_levels=False,
@@ -93,4 +93,4 @@ test_cfg = dict(
         nms_thr=0.2,
         score_thr=0.05,
         min_bbox_size=0,
-        max_num=500))
+        max_num=500)))
@@ -17,6 +17,6 @@ model = dict(
         num_classes=9,
         anchor_generator=dict(
             ranges=[[-100, -100, -1.8, 100, 100, -1.8]], custom_values=[]),
-        bbox_coder=dict(type='DeltaXYZWLHRBBoxCoder', code_size=7)))
-# model training settings (based on nuScenes model settings)
-train_cfg = dict(pts=dict(code_weight=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]))
+        bbox_coder=dict(type='DeltaXYZWLHRBBoxCoder', code_size=7)),
+    # model training settings (based on nuScenes model settings)
+    train_cfg=dict(pts=dict(code_weight=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0])))
@@ -52,9 +52,9 @@ model = dict(
             loss_weight=1.0),
         loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=2.0),
         loss_dir=dict(
-            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2)))
-# model training and testing settings
-train_cfg = dict(
+            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2)),
+    # model training and testing settings
+    train_cfg=dict(
     assigner=[
         dict(  # for Pedestrian
             type='MaxIoUAssigner',
@@ -80,12 +80,12 @@ train_cfg = dict(
     ],
     allowed_border=0,
     pos_weight=-1,
-    debug=False)
-test_cfg = dict(
+    debug=False),
+    test_cfg=dict(
     use_rotate_nms=True,
     nms_across_levels=False,
     nms_thr=0.01,
     score_thr=0.1,
     min_bbox_size=0,
     nms_pre=100,
-    max_num=50)
+    max_num=50))
@@ -66,9 +66,9 @@ model = dict(
             loss_weight=1.0),
         loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0),
         loss_dir=dict(
-            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2)))
-# model training and testing settings
-train_cfg = dict(
+            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2)),
+    # model training and testing settings
+    train_cfg=dict(
     pts=dict(
         assigner=[
             dict(  # car
@@ -96,9 +96,8 @@ train_cfg = dict(
         allowed_border=0,
         code_weight=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
         pos_weight=-1,
-        debug=False))
-
-test_cfg = dict(
+        debug=False)),
+    test_cfg=dict(
     pts=dict(
         use_rotate_nms=True,
         nms_across_levels=False,
@@ -106,4 +105,4 @@ test_cfg = dict(
         nms_thr=0.25,
         score_thr=0.1,
         min_bbox_size=0,
-        max_num=500))
+        max_num=500)))
@@ -48,9 +48,9 @@ model = dict(
             loss_weight=1.0),
         loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=2.0),
         loss_dir=dict(
-            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2)))
-# model training and testing settings
-train_cfg = dict(
+            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2)),
+    # model training and testing settings
+    train_cfg=dict(
     assigner=[
         dict(  # for Pedestrian
             type='MaxIoUAssigner',
@@ -76,12 +76,12 @@ train_cfg = dict(
     ],
     allowed_border=0,
     pos_weight=-1,
-    debug=False)
-test_cfg = dict(
+    debug=False),
+    test_cfg=dict(
    use_rotate_nms=True,
    nms_across_levels=False,
    nms_thr=0.01,
    score_thr=0.1,
    min_bbox_size=0,
    nms_pre=100,
-    max_num=50)
+    max_num=50))
@@ -60,9 +60,9 @@ model = dict(
             loss_weight=1.0),
         loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0),
         loss_dir=dict(
-            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2)))
-# model training and testing settings
-train_cfg = dict(
+            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2)),
+    # model training and testing settings
+    train_cfg=dict(
     assigner=[
         dict(  # car
             type='MaxIoUAssigner',
@@ -89,13 +89,12 @@ train_cfg = dict(
     allowed_border=0,
     code_weight=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
     pos_weight=-1,
-    debug=False)
-
-test_cfg = dict(
+    debug=False),
+    test_cfg=dict(
     use_rotate_nms=True,
     nms_across_levels=False,
     nms_pre=4096,
     nms_thr=0.25,
     score_thr=0.1,
     min_bbox_size=0,
-    max_num=500)
+    max_num=500))
@@ -65,9 +65,9 @@ model = dict(
             conv_out_channels=256,
             num_classes=80,
             loss_mask=dict(
-                type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))))
-# model training and testing settings
-train_cfg = dict(
+                type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))),
+    # model training and testing settings
+    train_cfg=dict(
         rpn=dict(
             assigner=dict(
                 type='MaxIoUAssigner',
@@ -108,8 +108,8 @@ train_cfg = dict(
             add_gt_as_proposals=True),
         mask_size=28,
         pos_weight=-1,
-        debug=False))
-test_cfg = dict(
+        debug=False)),
+    test_cfg=dict(
     rpn=dict(
         nms_across_levels=False,
         nms_pre=1000,
@@ -121,4 +121,4 @@ test_cfg = dict(
         score_thr=0.05,
         nms=dict(type='nms', iou_threshold=0.5),
         max_per_img=100,
-        mask_thr_binary=0.5))
+        mask_thr_binary=0.5)))
@@ -62,8 +62,12 @@ model = dict(
         size_res_loss=dict(
             type='SmoothL1Loss', reduction='sum', loss_weight=10.0 / 3.0),
         semantic_loss=dict(
-            type='CrossEntropyLoss', reduction='sum', loss_weight=1.0)))
-# model training and testing settings
-train_cfg = dict(pos_distance_thr=0.3, neg_distance_thr=0.6, sample_mod='vote')
-test_cfg = dict(
-    sample_mod='seed', nms_thr=0.25, score_thr=0.05, per_class_proposal=True)
+            type='CrossEntropyLoss', reduction='sum', loss_weight=1.0)),
+    # model training and testing settings
+    train_cfg=dict(
+        pos_distance_thr=0.3, neg_distance_thr=0.6, sample_mod='vote'),
+    test_cfg=dict(
+        sample_mod='seed',
+        nms_thr=0.25,
+        score_thr=0.05,
+        per_class_proposal=True))
@@ -111,9 +111,9 @@ model = dict(
                 type='CrossEntropyLoss',
                 use_sigmoid=True,
                 reduction='sum',
-                loss_weight=1.0))))
-# model training and testing settings
-train_cfg = dict(
+                loss_weight=1.0))),
+    # model training and testing settings
+    train_cfg=dict(
     rpn=dict(
         assigner=[
             dict(  # for Pedestrian
@@ -152,21 +152,24 @@ train_cfg = dict(
         assigner=[
             dict(  # for Pedestrian
                 type='MaxIoUAssigner',
-                iou_calculator=dict(type='BboxOverlaps3D', coordinate='lidar'),
+                iou_calculator=dict(
+                    type='BboxOverlaps3D', coordinate='lidar'),
                 pos_iou_thr=0.55,
                 neg_iou_thr=0.55,
                 min_pos_iou=0.55,
                 ignore_iof_thr=-1),
            dict(  # for Cyclist
                type='MaxIoUAssigner',
-                iou_calculator=dict(type='BboxOverlaps3D', coordinate='lidar'),
+                iou_calculator=dict(
+                    type='BboxOverlaps3D', coordinate='lidar'),
                pos_iou_thr=0.55,
                neg_iou_thr=0.55,
                min_pos_iou=0.55,
                ignore_iof_thr=-1),
            dict(  # for Car
                type='MaxIoUAssigner',
-                iou_calculator=dict(type='BboxOverlaps3D', coordinate='lidar'),
+                iou_calculator=dict(
+                    type='BboxOverlaps3D', coordinate='lidar'),
                pos_iou_thr=0.55,
                neg_iou_thr=0.55,
                min_pos_iou=0.55,
@@ -182,8 +185,8 @@ train_cfg = dict(
             add_gt_as_proposals=False,
             return_iou=True),
         cls_pos_thr=0.75,
-        cls_neg_thr=0.25))
-test_cfg = dict(
+        cls_neg_thr=0.25)),
+    test_cfg=dict(
     rpn=dict(
         nms_pre=1024,
         nms_post=100,
@@ -192,7 +195,10 @@ test_cfg = dict(
         score_thr=0,
         use_rotate_nms=True),
     rcnn=dict(
-        use_rotate_nms=True, use_raw_score=True, nms_thr=0.01, score_thr=0.3))
+        use_rotate_nms=True,
+        use_raw_score=True,
+        nms_thr=0.01,
+        score_thr=0.3)))
 # dataset settings
 dataset_type = 'KittiDataset'
@@ -50,9 +50,9 @@ model = dict(
             loss_weight=1.0),
         loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=2.0),
         loss_dir=dict(
-            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2)))
-# model training and testing settings
-train_cfg = dict(
+            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2)),
+    # model training and testing settings
+    train_cfg=dict(
     assigner=dict(
         type='MaxIoUAssigner',
         iou_calculator=dict(type='BboxOverlapsNearest3D'),
@@ -62,15 +62,15 @@ train_cfg = dict(
         ignore_iof_thr=-1),
     allowed_border=0,
     pos_weight=-1,
-    debug=False)
-test_cfg = dict(
+    debug=False),
+    test_cfg=dict(
     use_rotate_nms=True,
     nms_across_levels=False,
    nms_thr=0.01,
    score_thr=0.1,
    min_bbox_size=0,
    nms_pre=100,
-    max_num=50)
+    max_num=50))
 # dataset settings
 dataset_type = 'KittiDataset'
@@ -63,9 +63,8 @@ model = dict(
         loss_dir=dict(
             type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2),
     ),
-)
-# model training and testing settings
-train_cfg = dict(
+    # model training and testing settings
+    train_cfg=dict(
     assigner=[
         dict(  # for Pedestrian
             type='MaxIoUAssigner',
@@ -91,15 +90,15 @@ train_cfg = dict(
     ],
     allowed_border=0,
     pos_weight=-1,
-    debug=False)
-test_cfg = dict(
+    debug=False),
+    test_cfg=dict(
    use_rotate_nms=True,
    nms_across_levels=False,
    nms_thr=0.01,
    score_thr=0.1,
    min_bbox_size=0,
    nms_pre=100,
-    max_num=50)
+    max_num=50))
 # dataset settings
 dataset_type = 'KittiDataset'
@@ -52,9 +52,9 @@ model = dict(
             loss_weight=1.0),
         loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=2.0),
         loss_dir=dict(
-            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2)))
-# model training and testing settings
-train_cfg = dict(
+            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.2)),
+    # model training and testing settings
+    train_cfg=dict(
     assigner=[
         dict(  # for Pedestrian
             type='MaxIoUAssigner',
@@ -80,15 +80,15 @@ train_cfg = dict(
     ],
     allowed_border=0,
     pos_weight=-1,
-    debug=False)
-test_cfg = dict(
+    debug=False),
+    test_cfg=dict(
    use_rotate_nms=True,
    nms_across_levels=False,
    nms_thr=0.01,
    score_thr=0.1,
    min_bbox_size=0,
    nms_pre=100,
-    max_num=50)
+    max_num=50))
 # dataset settings
 dataset_type = 'KittiDataset'
@@ -47,10 +47,11 @@ For example, we change `centerpoint_0075voxel_second_secfpn_circlenms_4x8_cyclic
 _base_ = './centerpoint_0075voxel_second_secfpn_circlenms' \
     '_4x8_cyclic_20e_nus.py'

-test_cfg = dict(
+model = dict(
+    test_cfg=dict(
     pts=dict(
         use_rotate_nms=True,
-        max_num=83))
+        max_num=83)))

 point_cloud_range = [-54, -54, -5.0, 54, 54, 3.0]
 file_client_args = dict(backend='disk')
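The documentation hunk above carries the user-facing consequence of this commit: configs that inherit from a base file can no longer override a standalone test_cfg, because the base configs no longer define one at the top level. A minimal sketch of the new override pattern, reusing the _base_ path and the values shown in that hunk:

_base_ = './centerpoint_0075voxel_second_secfpn_circlenms' \
    '_4x8_cyclic_20e_nus.py'

# Old style, no longer effective after this change:
# test_cfg = dict(pts=dict(use_rotate_nms=True, max_num=83))

# New style: route the override through the model dict so it merges into
# model.test_cfg of the base config.
model = dict(test_cfg=dict(pts=dict(use_rotate_nms=True, max_num=83)))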
@@ -16,16 +16,14 @@ model = dict(
     pts_middle_encoder=dict(sparse_shape=[41, 1440, 1440]),
     pts_bbox_head=dict(
         bbox_coder=dict(
-            voxel_size=voxel_size[:2], pc_range=point_cloud_range[:2])))
-
-train_cfg = dict(
+            voxel_size=voxel_size[:2], pc_range=point_cloud_range[:2])),
+    train_cfg=dict(
     pts=dict(
         grid_size=[1440, 1440, 40],
         voxel_size=voxel_size,
-        point_cloud_range=point_cloud_range))
-
-test_cfg = dict(
-    pts=dict(voxel_size=voxel_size[:2], pc_range=point_cloud_range[:2]))
+        point_cloud_range=point_cloud_range)),
+    test_cfg=dict(
+        pts=dict(voxel_size=voxel_size[:2], pc_range=point_cloud_range[:2])))

 dataset_type = 'NuScenesDataset'
 data_root = 'data/nuscenes/'
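The commit message also mentions adapting build_detector to the new layout. The snippet below is a hypothetical, framework-free sketch of that compatibility strategy, not the actual mmdetection3d implementation: the helper name build_model_cfg and its body are illustrative only. The idea is to prefer the train_cfg/test_cfg already nested in the model config, accept the legacy standalone arguments with a deprecation warning, and refuse conflicting definitions.

import warnings


def build_model_cfg(cfg, train_cfg=None, test_cfg=None):
    """Hypothetical helper: fold legacy top-level train_cfg/test_cfg into the
    model config dict, rejecting values defined in both places."""
    if train_cfg is not None or test_cfg is not None:
        warnings.warn(
            'passing train_cfg/test_cfg separately is deprecated; '
            'specify them inside the model config instead', UserWarning)
    # Each setting may come from exactly one place.
    assert cfg.get('train_cfg') is None or train_cfg is None, \
        'train_cfg specified in both the outer field and the model field'
    assert cfg.get('test_cfg') is None or test_cfg is None, \
        'test_cfg specified in both the outer field and the model field'
    merged = dict(cfg)
    if train_cfg is not None:
        merged['train_cfg'] = train_cfg
    if test_cfg is not None:
        merged['test_cfg'] = test_cfg
    return merged


# Usage with a new-style config: everything already sits in the model dict,
# so the legacy arguments stay at their default of None.
model = dict(type='VoteNet', train_cfg=dict(sample_mod='vote'))
print(build_model_cfg(model))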