Commit f06feced authored by Deyu Fu

better print

parent c8ca4bf4
@@ -182,7 +182,7 @@ class FP16_Optimizer(object):
     def _update_scale(self, skip):
         if self.dynamic_loss_scale:
             if skip:
-                print("grad overflow on iteration", self.cur_iter)
+                print("\nGrad overflow on iteration", self.cur_iter)
                 print("Using dynamic loss scale of", self.cur_scale)
                 self.cur_scale = max(self.cur_scale/self.scale_factor, 1)
                 self.last_overflow_iter = self.cur_iter
@@ -191,7 +191,7 @@ class FP16_Optimizer(object):
                     self.cur_scale *= self.scale_factor
         else:
             if skip:
-                print("Grad overflow on iteration", self.cur_iter)
+                print("\nGrad overflow on iteration", self.cur_iter)
                 print("Using static loss scale of", self.cur_scale)
         self.cur_iter +=1
         return
@@ -214,4 +214,3 @@ class FP16_Optimizer(object):
         self.optimizer.param_groups = value
     param_groups = property(_get_param_groups, _set_param_groups)
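For context, the hunks above touch the overflow messages inside _update_scale, the method that drives dynamic loss scaling: when a gradient overflow is detected, the loss scale is cut by scale_factor (but never below 1), and after a stretch of overflow-free iterations it is grown again by scale_factor. Below is a minimal standalone sketch of that update, not the code from this commit; the class name DynamicLossScaler and the scale_window growth condition are illustrative assumptions, since the diff only shows the self.cur_scale *= self.scale_factor line of the growth branch.

# Illustrative sketch only -- names and the growth rule are assumptions.
class DynamicLossScaler:
    def __init__(self, init_scale=2**16, scale_factor=2.0, scale_window=1000):
        self.cur_scale = init_scale
        self.scale_factor = scale_factor
        self.scale_window = scale_window        # assumed: overflow-free iterations before growing
        self.cur_iter = 0
        self.last_overflow_iter = -1

    def update_scale(self, skip):
        if skip:
            # Overflow: shrink the scale, clamped at 1, and remember when it happened.
            print("\nGrad overflow on iteration", self.cur_iter)
            print("Using dynamic loss scale of", self.cur_scale)
            self.cur_scale = max(self.cur_scale / self.scale_factor, 1)
            self.last_overflow_iter = self.cur_iter
        elif (self.cur_iter - self.last_overflow_iter) % self.scale_window == 0:
            # Assumed growth rule: a full window without overflow scales back up.
            self.cur_scale *= self.scale_factor
        self.cur_iter += 1

With skip=True the scale halves on each call; with repeated skip=False calls it grows by scale_factor once per scale_window iterations, which is the behavior the printed messages in the diff report.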