Commit 219cbf6e authored by Myle Ott

Set initial learning rate in LR schedulers by calling step_update(0) at init

parent 2af922f1
@@ -117,6 +117,7 @@ class Trainer(object):
         # We should initialize the learning rate scheduler immediately after
         # building the optimizer, so that the initial learning rate is set.
         self._lr_scheduler = lr_scheduler.build_lr_scheduler(self.args, self.optimizer)
+        self._lr_scheduler.step_update(0)
 
     def save_checkpoint(self, filename, extra_state):
         """Save all training state in a checkpoint file."""
......
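For context, here is a minimal sketch of why this one-line fix matters. This is not fairseq's actual scheduler code; the class name, constructor parameters, and constants below are hypothetical, simplified stand-ins for an inverse-sqrt warmup schedule. The point it illustrates: without calling step_update(0) at construction time, the optimizer keeps whatever placeholder LR it was built with until the first post-step update, so the very first training steps run at the wrong learning rate.

```python
import torch


class InverseSqrtSchedule:
    """Hypothetical simplified scheduler: linear warmup, then inverse-sqrt decay."""

    def __init__(self, optimizer, peak_lr=5e-4, warmup_init_lr=1e-7, warmup_updates=4000):
        self.optimizer = optimizer
        self.warmup_init_lr = warmup_init_lr
        self.warmup_updates = warmup_updates
        # Linear warmup slope from warmup_init_lr up to peak_lr.
        self.lr_step = (peak_lr - warmup_init_lr) / warmup_updates
        # Decay factor chosen so lr == peak_lr at the end of warmup.
        self.decay = peak_lr * warmup_updates ** 0.5
        # The fix from this commit: set the initial LR immediately,
        # instead of leaving the optimizer's placeholder LR in place
        # until the first call after a training step.
        self.step_update(0)

    def step_update(self, num_updates):
        """Set the learning rate for the given update count and return it."""
        if num_updates < self.warmup_updates:
            lr = self.warmup_init_lr + num_updates * self.lr_step
        else:
            lr = self.decay * num_updates ** -0.5
        for group in self.optimizer.param_groups:
            group['lr'] = lr
        return lr


# Without step_update(0) in __init__, the optimizer would keep the
# placeholder lr=1.0 below until the first update after a training step.
params = [torch.nn.Parameter(torch.zeros(1))]
opt = torch.optim.SGD(params, lr=1.0)
sched = InverseSqrtSchedule(opt)
assert opt.param_groups[0]['lr'] == sched.warmup_init_lr
```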