"...git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "d0d1632958c7d543e07afc672a8501d704e5a65f"
Unverified commit 90ed9ae2, authored by Stas Bekman, committed by GitHub

fix use_amp rename after pr 17138 (#17636)

parent c70dacde
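
Context for the change: PR #17138 apparently split the Trainer's single `use_amp` flag into device-specific flags, and this commit updates two call sites that still used the old name. Below is a minimal sketch (not transformers code) of why a stale reference breaks at runtime; `StubTrainer` is a made-up stand-in, and `use_cpu_amp` is an assumption about what #17138 added alongside the rename, not something shown in this diff.

class StubTrainer:
    def __init__(self):
        self.use_cuda_amp = True   # CUDA autocast path (was: use_amp)
        self.use_cpu_amp = False   # CPU autocast path (assumed from the split)

trainer = StubTrainer()
# Any leftover reference to the old attribute name now fails:
try:
    trainer.use_amp
except AttributeError:
    print("stale `use_amp` reference -> AttributeError")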
@@ -894,7 +894,7 @@ def extract_hyperparameters_from_trainer(trainer):
         hyperparameters["num_epochs"] = trainer.args.num_train_epochs

     if trainer.args.fp16:
-        if trainer.use_amp:
+        if trainer.use_cuda_amp:
             hyperparameters["mixed_precision_training"] = "Native AMP"
         elif trainer.use_apex:
             hyperparameters["mixed_precision_training"] = f"Apex, opt level {trainer.args.fp16_opt_level}"
@@ -546,7 +546,12 @@ class Trainer:
             self.use_apex = True

         # FP16 + model parallelism in SageMaker: gradient clipping does not work for now so we raise a helpful error.
-        if is_sagemaker_mp_enabled() and self.use_amp and args.max_grad_norm is not None and args.max_grad_norm > 0:
+        if (
+            is_sagemaker_mp_enabled()
+            and self.use_cuda_amp
+            and args.max_grad_norm is not None
+            and args.max_grad_norm > 0
+        ):
             raise ValueError(
                 "SageMaker Model Parallelism in mixed precision mode does not support gradient clipping yet. Pass "
                 "along 'max_grad_norm': 0 in your hyperparameters."