"...git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "faf03541e260c27db94b114a8ab1f125c0f790bf"
Unverified Commit 4a20b7c4 authored by Stas Bekman, committed by GitHub

[trainer] no --deepspeed and --sharded_ddp together (#9712)



* no --deepspeed and --sharded_ddp together

* Update src/transformers/trainer.py
Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>

* style
Co-authored-by: default avatarSylvain Gugger <35901082+sgugger@users.noreply.github.com>
parent 7acfa95a
@@ -337,12 +337,15 @@ class Trainer:
         # Setup Sharded DDP training
         self.sharded_dpp = False
         if args.sharded_ddp:
+            if args.deepspeed:
+                raise ValueError(
+                    "Using --sharded_ddp together with --deepspeed is not possible, deactivate one of those flags."
+                )
+
             if args.local_rank == -1:
                 raise ValueError("Using sharded DDP only works in distributed training.")
             elif not is_fairscale_available():
                 raise ImportError("Sharded DDP training requires fairscale: `pip install fairscale`.")
-            elif args.deepspeed:
-                raise ValueError("can't use --sharded_ddp together with --deepspeed.")
             else:
                 self.sharded_dpp = True
...
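For context, the guard added here validates mutually exclusive launcher flags up front, before any distributed setup runs. Below is a minimal standalone sketch of the same fail-fast pattern; it is a hypothetical demo script using argparse, not code from the commit, though the flag names and error messages mirror the `TrainingArguments` fields the diff checks.

```python
# Hypothetical standalone sketch of the fail-fast flag validation this
# commit adds to Trainer.__init__ (not code from the repository).
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--sharded_ddp", action="store_true")
parser.add_argument("--deepspeed", type=str, default=None, help="path to a DeepSpeed config file")
parser.add_argument("--local_rank", type=int, default=-1)
args = parser.parse_args()

if args.sharded_ddp:
    # Reject the incompatible combination first, so the user sees the real
    # conflict instead of an unrelated failure later in distributed setup.
    if args.deepspeed:
        raise ValueError(
            "Using --sharded_ddp together with --deepspeed is not possible, deactivate one of those flags."
        )
    if args.local_rank == -1:
        raise ValueError("Using sharded DDP only works in distributed training.")

print("flags OK: sharded DDP enabled")
```

Note the design choice visible in the diff: the `args.deepspeed` check moves to the top as a standalone `if` rather than staying a trailing `elif`, so the flag conflict is reported even when the `local_rank` check would also have fired, and the new message tells the user exactly which flags to change.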