Commit ea6e9f7f authored by Lei Tian's avatar Lei Tian Committed by Facebook GitHub Bot
Browse files

fix optimizer setting in pytorch lightning

Summary: fix optimizer setting in pytorch lightning

Reviewed By: wat3rBro

Differential Revision: D30988441

fbshipit-source-id: fcd2f4c77a87a790d7e99b0e3c833c291fd66e77
parent d07a58f3
...@@ -23,6 +23,7 @@ from d2go.modeling.quantization import (
    default_prepare_for_quant,
    default_prepare_for_quant_convert,
)
from d2go.optimizer import build_optimizer_mapper
from d2go.runner.callbacks.quantization import maybe_prepare_for_quantization, PREPARED
from d2go.runner.default_runner import (
    Detectron2GoRunner,
...@@ -225,7 +226,7 @@ class DefaultTask(pl.LightningModule):
def configure_optimizers(
    self,
) -> Tuple[List[torch.optim.Optimizer], List]:
    """Build the training optimizer and LR scheduler for PyTorch Lightning.

    Uses d2go's ``build_optimizer_mapper`` (per this commit's fix) to create
    the optimizer from ``self.cfg``/``self.model``, then wraps the detectron2
    LR scheduler in Lightning's scheduler-config dict with per-step updates.

    Returns:
        A pair of lists: ``([optimizer], [scheduler_config])`` as expected by
        Lightning's ``configure_optimizers`` contract.
    """
    optimizer = build_optimizer_mapper(self.cfg, self.model)
    scheduler = d2_build_lr_scheduler(self.cfg, optimizer)
    # "interval": "step" makes Lightning advance the scheduler every training
    # step rather than once per epoch, matching detectron2's iteration-based
    # schedules.
    scheduler_config = {"scheduler": scheduler, "interval": "step"}
    return [optimizer], [scheduler_config]
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment