Commit 0325fe8f authored by Zeqiang Lai's avatar Zeqiang Lai Committed by zhe chen
Browse files

make ZeroRedundancyOptimizer compatible with torch < 1.12

parent 94586767
...@@ -37,22 +37,32 @@ def build_optimizer(config, model): ...@@ -37,22 +37,32 @@ def build_optimizer(config, model):
if use_zero: if use_zero:
print(f"\nUse Zero!") print(f"\nUse Zero!")
if opt_lower == 'sgd': if opt_lower == 'sgd':
# an ugly workaround
# this problem is fixed in torch 1.12 and later
# https://github.com/pytorch/pytorch/issues/71347 # https://github.com/pytorch/pytorch/issues/71347
# before 1.12, we could only pass a flat parameter list to the zero optimizer, so we first construct it from parameters[0] with its lr and weight decay,
# then we add the remaining parameter groups via add_param_group.
optimizer = ZeroRedundancyOptimizer( optimizer = ZeroRedundancyOptimizer(
parameters, parameters[0]['params'],
optimizer_class=optim.SGD, optimizer_class=optim.SGD,
momentum=config.TRAIN.OPTIMIZER.MOMENTUM, momentum=config.TRAIN.OPTIMIZER.MOMENTUM, nesterov=True,
nesterov=True, lr=parameters[0]['lr'], weight_decay=parameters[0]['weight_decay']
lr=config.TRAIN.BASE_LR, )
weight_decay=config.TRAIN.WEIGHT_DECAY) if len(parameters) > 1:
for param_group in parameters[1:]:
optimizer.add_param_group(param_group)
elif opt_lower == 'adamw': elif opt_lower == 'adamw':
optimizer = ZeroRedundancyOptimizer( optimizer = ZeroRedundancyOptimizer(
parameters, parameters[0]['params'],
optimizer_class=optim.AdamW, optimizer_class=optim.AdamW,
eps=config.TRAIN.OPTIMIZER.EPS, eps=config.TRAIN.OPTIMIZER.EPS, betas=config.TRAIN.OPTIMIZER.BETAS,
betas=config.TRAIN.OPTIMIZER.BETAS, lr=parameters[0]['lr'], weight_decay=parameters[0]['weight_decay']
lr=config.TRAIN.BASE_LR, )
weight_decay=config.TRAIN.WEIGHT_DECAY) if len(parameters) > 1:
for param_group in parameters[1:]:
optimizer.add_param_group(param_group)
else: else:
if opt_lower == 'sgd': if opt_lower == 'sgd':
optimizer = optim.SGD(parameters, optimizer = optim.SGD(parameters,
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment