Unverified commit cfeb2163, authored by Myle Ott, committed by GitHub

Merge pull request #727 from pytorch/fix_lr_scheduler

Set initial learning rate in LR schedulers by calling step_update(0) at init
parents 2af922f1 219cbf6e
@@ -117,6 +117,7 @@ class Trainer(object):
         # We should initialize the learning rate scheduler immediately after
         # building the optimizer, so that the initial learning rate is set.
         self._lr_scheduler = lr_scheduler.build_lr_scheduler(self.args, self.optimizer)
+        self._lr_scheduler.step_update(0)

     def save_checkpoint(self, filename, extra_state):
         """Save all training state in a checkpoint file."""
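For context on why this one-line change fixes the initial learning rate: in fairseq, the LR scheduler (not the optimizer) owns the warmup schedule, and it only pushes a learning rate into the optimizer when `step_update(num_updates)` is called. Without the call at init, the optimizer keeps whatever LR it was constructed with until the first training update. The sketch below illustrates the mechanism; `ToyOptimizer` and `ToyInverseSqrtScheduler` are hypothetical stand-ins, not fairseq's actual classes, though the warmup-then-inverse-sqrt shape mirrors fairseq's `inverse_sqrt` scheduler:

```python
# Minimal sketch, assuming a warmup-style scheduler. ToyOptimizer and
# ToyInverseSqrtScheduler are illustrative stand-ins, not fairseq's API.

class ToyOptimizer:
    def __init__(self, lr):
        self.lr = lr  # stand-in for the real optimizer's param-group LR


class ToyInverseSqrtScheduler:
    """Warm up linearly from init_lr to peak_lr, then decay as 1/sqrt(updates)."""

    def __init__(self, optimizer, init_lr=1e-7, peak_lr=5e-4, warmup_updates=4000):
        self.optimizer = optimizer
        self.init_lr = init_lr
        self.peak_lr = peak_lr
        self.warmup_updates = warmup_updates

    def step_update(self, num_updates):
        """Compute the LR for this update count and push it into the optimizer."""
        if num_updates < self.warmup_updates:
            lr = self.init_lr + (self.peak_lr - self.init_lr) * (
                num_updates / self.warmup_updates
            )
        else:
            lr = self.peak_lr * (self.warmup_updates / num_updates) ** 0.5
        self.optimizer.lr = lr
        return lr


opt = ToyOptimizer(lr=5e-4)    # optimizer built with the peak LR
sched = ToyInverseSqrtScheduler(opt)
print(opt.lr)                  # 0.0005 -- far too high for update 0
sched.step_update(0)           # the call this patch adds right after init
print(opt.lr)                  # 1e-07 -- the correct warmup starting LR
```

With a long warmup the gap is large (peak LR vs. the warmup starting LR), so synchronizing the optimizer with the schedule before the first batch is what this patch accomplishes.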