# This schedule is mainly used by models with dynamic voxelization.
# optimizer
lr = 0.003  # max learning rate
optimizer = dict(
    type='AdamW',
    lr=lr,
    betas=(0.95, 0.99),  # coefficients for AdamW's first/second moment estimates
    weight_decay=0.001)
# clip gradients by their global L2 norm at 10 to stabilize training
optimizer_config = dict(grad_clip=dict(max_norm=10, norm_type=2))

# cosine annealing down to lr * 1e-5, after a 1000-iter linear warmup
# that starts at lr / 10
lr_config = dict(
    policy='CosineAnnealing',
    warmup='linear',
    warmup_iters=1000,
    warmup_ratio=1.0 / 10,
    min_lr_ratio=1e-5)

momentum_config = None  # no momentum schedule; optimizer betas stay fixed

runner = dict(type='EpochBasedRunner', max_epochs=40)  # train for 40 epochs
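
# Usage sketch (an assumption, not part of this file): in an MMDetection3D-style
# config system, a model config would typically pull this schedule in through the
# `_base_` inheritance mechanism rather than importing it directly. File names
# other than this schedule are hypothetical placeholders:
#
#   _base_ = [
#       '../_base_/models/some_model.py',    # hypothetical model config
#       '../_base_/schedules/cosine.py',     # this schedule (assumed path)
#       '../_base_/default_runtime.py',      # hypothetical runtime config
#   ]
#
# Any key set in the inheriting config (e.g. a different `lr`) overrides the
# value defined here.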