"git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "e70abdad1ba905e59be973a5a486eb03feae07d2"
Unverified Commit e6f211ca authored by Stas Bekman, committed by GitHub
Browse files

[trainer] round numbers in trainer state (#9491)

* round numbers

* style

* round only on logging
parent 01a16840
@@ -964,7 +964,7 @@ class Trainer:
                 # reset tr_loss to zero
                 tr_loss -= tr_loss
-                logs["loss"] = tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged)
+                logs["loss"] = round(tr_loss_scalar / (self.state.global_step - self._globalstep_last_logged), 4)
                 # backward compatibility for pytorch schedulers
                 logs["learning_rate"] = (
                     self.lr_scheduler.get_last_lr()[0]
@@ -1168,7 +1168,7 @@ class Trainer:
                 The values to log.
         """
         if self.state.epoch is not None:
-            logs["epoch"] = self.state.epoch
+            logs["epoch"] = round(self.state.epoch, 2)
         self.control = self.callback_handler.on_log(self.args, self.state, self.control, logs)
         output = {**logs, **{"step": self.state.global_step}}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment