Unverified Commit 34127b9f authored by Wenwei Zhang, committed by GitHub

[enhance]: show grad norm in fp16 optimizer hook (#584)

parent 71952ff3
@@ -113,7 +113,11 @@ class Fp16OptimizerHook(OptimizerHook):
             if param.grad is not None:
                 param.grad.div_(self.loss_scale)
         if self.grad_clip is not None:
-            self.clip_grads(fp32_weights)
+            grad_norm = self.clip_grads(fp32_weights)
+            if grad_norm is not None:
+                # Add grad norm to the logger
+                runner.log_buffer.update({'grad_norm': float(grad_norm)},
+                                         runner.outputs['num_samples'])
         # update fp32 params
         runner.optimizer.step()
         # copy fp32 params to the fp16 model
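For context, the 'grad_norm' value logged above is simply whatever `clip_grads` returns; when no parameter has a gradient there is nothing to clip, hence the `is not None` guard. Below is a minimal sketch (not the project's source) of how such a helper can return the norm, assuming it wraps `torch.nn.utils.clip_grad_norm_` and that `self.grad_clip` is a dict of its keyword arguments, e.g. `dict(max_norm=35, norm_type=2)`:

from torch.nn.utils import clip_grad_norm_


def clip_grads(params, grad_clip):
    # Hypothetical standalone version of the hook's clip_grads method.
    # Only consider parameters that actually received gradients.
    params = [p for p in params if p.requires_grad and p.grad is not None]
    if len(params) > 0:
        # clip_grad_norm_ clips in place and returns the total norm of the
        # gradients (before clipping); the hook logs this as 'grad_norm'.
        return clip_grad_norm_(params, **grad_clip)
    # No gradients to clip, so there is no norm to log.
    return None

Since recent PyTorch versions return a 0-dim tensor rather than a float, wrapping the result with `float(grad_norm)` before logging, as the diff does, keeps the logged value a plain Python number.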