"tests/python/vscode:/vscode.git/clone" did not exist on "e2d35f6218d1e5a59b392ddbab90c5287c166aab"
Commit aeb15613 authored by Binh Tang's avatar Binh Tang Committed by Facebook GitHub Bot
Browse files

Rename `DDPPlugin` to `DDPStrategy` (#11142)

Summary:
### New commit log messages
  b64dea9dc Rename `DDPPlugin` to `DDPStrategy` (#11142)

Reviewed By: jjenniferdai

Differential Revision: D33259306

fbshipit-source-id: b4608c6b96b4a7977eaa4ed3f03c4b824882aef0
parent 21ae9538
...@@ -25,10 +25,10 @@ from pytorch_lightning.loggers import TensorBoardLogger ...@@ -25,10 +25,10 @@ from pytorch_lightning.loggers import TensorBoardLogger
from torch.distributed import get_rank from torch.distributed import get_rank
try: try:
from pytorch_lightning.plugins import DDPPlugin from pytorch_lightning.plugins import DDPStrategy
except ImportError: except ImportError:
assert os.getenv("OSSRUN") == "1" assert os.getenv("OSSRUN") == "1"
# FIXME: DDPPlugin has been renamed to DDPStrategy, however internal version is # FIXME: DDPPlugin has been renamed to DDPStrategy, however internal version is
# not updated yet, temporarily skipping the import in oss env in order to unblock # not updated yet, temporarily skipping the import in oss env in order to unblock
# CI where DDP is not used. # CI where DDP is not used.
...@@ -90,7 +90,7 @@ def get_trainer_params( ...@@ -90,7 +90,7 @@ def get_trainer_params(
plugins = [] plugins = []
if accelerator: if accelerator:
plugins.append( plugins.append(
DDPPlugin(find_unused_parameters=cfg.MODEL.DDP_FIND_UNUSED_PARAMETERS) DDPStrategy(find_unused_parameters=cfg.MODEL.DDP_FIND_UNUSED_PARAMETERS)
) )
return { return {
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment