Unverified Commit 6e535db2 authored by Federico Pozzi's avatar Federico Pozzi Committed by GitHub
Browse files

refactor: replace LambdaLR with PolynomialLR in segmentation training script (#6405)

parent 7fb8d068
@@ -10,6 +10,7 @@
 import torchvision
 import utils
 from coco_utils import get_coco
 from torch import nn
+from torch.optim.lr_scheduler import PolynomialLR
 from torchvision.transforms import functional as F, InterpolationMode
@@ -184,8 +185,8 @@ def main(args):
     scaler = torch.cuda.amp.GradScaler() if args.amp else None

     iters_per_epoch = len(data_loader)
-    main_lr_scheduler = torch.optim.lr_scheduler.LambdaLR(
-        optimizer, lambda x: (1 - x / (iters_per_epoch * (args.epochs - args.lr_warmup_epochs))) ** 0.9
-    )
+    main_lr_scheduler = PolynomialLR(
+        optimizer, total_iters=iters_per_epoch * (args.epochs - args.lr_warmup_epochs), power=0.9
+    )
     if args.lr_warmup_epochs > 0:
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment