Commit 1d76a7c0 authored by zhangwenwei

Add momentum scheduler

parent 99397168
...
@@ -90,14 +90,15 @@ def train_detector(model,
     if fp16_cfg is not None:
         optimizer_config = Fp16OptimizerHook(
             **cfg.optimizer_config, **fp16_cfg, distributed=distributed)
-    elif distributed:
+    elif distributed and 'type' not in cfg.optimizer_config:
         optimizer_config = DistOptimizerHook(**cfg.optimizer_config)
     else:
         optimizer_config = cfg.optimizer_config
     # register hooks
     runner.register_training_hooks(cfg.lr_config, optimizer_config,
-                                   cfg.checkpoint_config, cfg.log_config)
+                                   cfg.checkpoint_config, cfg.log_config,
+                                   cfg.get('momentum_config', None))
     if distributed:
         runner.register_hook(DistSamplerSeedHook())
...
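
With this change, runner.register_training_hooks receives an optional momentum_config, so a config file can schedule momentum alongside the learning rate; the tightened elif additionally leaves cfg.optimizer_config as-is when it names its own hook type, rather than always wrapping it in DistOptimizerHook. Below is a minimal sketch of a config that would exercise the new argument, assuming mmcv's cyclic LR/momentum updater policies; the concrete values are illustrative and not part of this commit.

# Hypothetical config snippet (values assumed for illustration).
# momentum_config is picked up by runner.register_training_hooks above and
# drives a momentum updater hook, mirroring how lr_config drives the LR
# updater. Cycling momentum opposite to the LR follows the one-cycle
# recipe: lower momentum while the LR is high keeps updates stable.
lr_config = dict(
    policy='cyclic',
    target_ratio=(10, 1e-4),  # peak LR = 10x base, final LR = 1e-4 x base
    cyclic_times=1,           # one full cycle over the whole run
    step_ratio_up=0.4,        # 40% of the cycle spent ramping up
)
momentum_config = dict(
    policy='cyclic',
    target_ratio=(0.85 / 0.95, 1),  # dip momentum while the LR peaks
    cyclic_times=1,
    step_ratio_up=0.4,
)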