Unverified Commit cfa5a657 authored by mcarilli, committed by GitHub

Merge pull request #184 from justusschock/master

Add "dynamic" argument to loss scaler
parents 0c2a629d 1989a575
@@ -13,7 +13,7 @@ class OptimWrapper(object):
         self._num_loss = num_loss
         self._loss_idx = 0
         self._skip_next = [False] * num_loss
-        self._loss_scaler = [LossScaler() for _ in range(num_loss)]
+        self._loss_scaler = [LossScaler('dynamic') for _ in range(num_loss)]

     @contextlib.contextmanager
     def scale_loss(self, loss):
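For context, "dynamic" loss scaling generally means the scale factor is adjusted at run time: it is reduced when an overflow (inf/NaN in the gradients) is detected and grown again after a stretch of overflow-free steps. The sketch below is a minimal, self-contained illustration of that idea only; the class, parameter names, and thresholds are assumptions for illustration and are not apex's actual LossScaler implementation.

# Minimal sketch of dynamic loss scaling (illustrative only; not apex's LossScaler).
import math


class DynamicLossScalerSketch:
    """Grows the loss scale after a run of overflow-free steps and
    shrinks it whenever an overflow (inf/NaN) is seen in the gradients."""

    def __init__(self, init_scale=2.0 ** 16, growth_factor=2.0,
                 backoff_factor=0.5, growth_interval=2000):
        self.scale = init_scale
        self.growth_factor = growth_factor
        self.backoff_factor = backoff_factor
        self.growth_interval = growth_interval
        self._good_steps = 0  # consecutive steps without overflow

    def has_overflow(self, grads):
        # Overflow check on plain Python floats; a real implementation
        # would inspect framework tensors instead.
        return any(math.isinf(g) or math.isnan(g) for g in grads)

    def update(self, grads):
        """Call once per optimization step, after unscaling the gradients.
        Returns True if the step is safe to apply, False if it should be skipped."""
        if self.has_overflow(grads):
            # Overflow: shrink the scale and skip this step.
            self.scale *= self.backoff_factor
            self._good_steps = 0
            return False
        self._good_steps += 1
        if self._good_steps % self.growth_interval == 0:
            # A long run of clean steps: try a larger scale.
            self.scale *= self.growth_factor
        return True


# Usage sketch: scale the loss before backward, unscale the gradients,
# then let the scaler decide whether to apply or skip the step.
scaler = DynamicLossScalerSketch(init_scale=2.0 ** 8, growth_interval=4)
for step in range(8):
    grads = [0.1, float('inf')] if step == 2 else [0.1, 0.2]
    if scaler.update(grads):
        pass  # apply optimizer step with the unscaled gradients
    print(step, scaler.scale)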