Commit e4f508a3 authored by Binh Tang's avatar Binh Tang Committed by Facebook GitHub Bot
Browse files

[PyTorchLightning/pytorch-lightning] Rename `DDPPlugin` to `DDPStrategy` (#11142)

Summary:
### New commit log messages
  b64dea9dc Rename `DDPPlugin` to `DDPStrategy` (#11142)

Reviewed By: jjenniferdai

Differential Revision: D33259306

fbshipit-source-id: b4608c6b96b4a7977eaa4ed3f03c4b824882aef0
parent 4f487c4a
...@@ -19,7 +19,7 @@ import torch ...@@ -19,7 +19,7 @@ import torch
import torchaudio import torchaudio
from pytorch_lightning import LightningModule, Trainer from pytorch_lightning import LightningModule, Trainer
from pytorch_lightning.callbacks import ModelCheckpoint, EarlyStopping from pytorch_lightning.callbacks import ModelCheckpoint, EarlyStopping
from pytorch_lightning.plugins import DDPPlugin from pytorch_lightning.plugins import DDPStrategy
from torch import nn from torch import nn
from torch.optim.lr_scheduler import _LRScheduler from torch.optim.lr_scheduler import _LRScheduler
from torch.utils.data import DataLoader from torch.utils.data import DataLoader
...@@ -426,7 +426,7 @@ def cli_main(): ...@@ -426,7 +426,7 @@ def cli_main():
gpus=args.num_gpu, gpus=args.num_gpu,
num_nodes=args.num_node, num_nodes=args.num_node,
accelerator="ddp", accelerator="ddp",
plugins=DDPPlugin(find_unused_parameters=False), # make sure there are no unused params plugins=DDPStrategy(find_unused_parameters=False), # make sure there are no unused params
limit_train_batches=1.0, # Useful for fast experiment limit_train_batches=1.0, # Useful for fast experiment
gradient_clip_val=5.0, gradient_clip_val=5.0,
callbacks=callbacks, callbacks=callbacks,
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment