Unverified commit d751dbec, authored by Hz, Ji and committed by GitHub

remove the obsolete code related to fairscale FSDP (#26651)

* remove the obsolete code related to fairscale FSDP

* apply review suggestion
parent 5fbed2d7
src/transformers/trainer.py
@@ -1850,12 +1850,6 @@ class Trainer:
                 if is_sagemaker_mp_enabled() and args.fp16:
                     self.optimizer.clip_master_grads(args.max_grad_norm)
-                elif hasattr(self.optimizer, "clip_grad_norm"):
-                    # Some optimizers (like the sharded optimizer) have a specific way to do gradient clipping
-                    self.optimizer.clip_grad_norm(args.max_grad_norm)
-                elif hasattr(model, "clip_grad_norm_"):
-                    # Some models (like FullyShardedDDP) have a specific way to do gradient clipping
-                    model.clip_grad_norm_(args.max_grad_norm)
                 elif self.use_apex:
                     # Revert to normal clipping otherwise, handling Apex or full precision
                     nn.utils.clip_grad_norm_(
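With the fairscale-specific hooks removed, gradient clipping falls through to the Apex branch or to plain PyTorch clipping. Below is a minimal, self-contained sketch of that generic path; `model`, `optimizer`, and `max_grad_norm` are illustrative stand-ins, not the Trainer's actual attributes.

import torch
from torch import nn

# Illustrative stand-ins for the objects the Trainer manages internally.
model = nn.Linear(10, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
max_grad_norm = 1.0

inputs = torch.randn(4, 10)
loss = model(inputs).sum()
loss.backward()

# Plain PyTorch clipping, the same call kept in the diff's
# Apex/full-precision branch above.
nn.utils.clip_grad_norm_(model.parameters(), max_grad_norm)
optimizer.step()
optimizer.zero_grad()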
src/transformers/training_args.py
@@ -1384,10 +1384,7 @@ class TrainingArguments:
         if self.bf16:
             if self.half_precision_backend == "apex":
-                raise ValueError(
-                    " `--half_precision_backend apex`: GPU bf16 is not supported by apex. Use"
-                    " `--half_precision_backend cuda_amp` instead"
-                )
+                raise ValueError(" `--half_precision_backend apex`: GPU bf16 is not supported by apex.")
         if self.lr_scheduler_type == SchedulerType.REDUCE_ON_PLATEAU:
             if self.evaluation_strategy == IntervalStrategy.NO:
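The TrainingArguments change only shortens the error message, since the `--half_precision_backend cuda_amp` suggestion no longer applies. A standalone sketch that mirrors the simplified check (a plain function for illustration, not the real TrainingArguments.__post_init__):

# Mirror of the simplified bf16/apex validation from the diff above.
def check_half_precision_backend(bf16: bool, half_precision_backend: str) -> None:
    if bf16 and half_precision_backend == "apex":
        raise ValueError(" `--half_precision_backend apex`: GPU bf16 is not supported by apex.")

check_half_precision_backend(bf16=False, half_precision_backend="apex")      # passes
check_half_precision_backend(bf16=True, half_precision_backend="cuda_amp")   # passes
# check_half_precision_backend(bf16=True, half_precision_backend="apex")     # raises ValueError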