Commit 3c18ae13 authored by Xiaoliang Dai's avatar Xiaoliang Dai Committed by Facebook GitHub Bot
Browse files

Initial mae d2go commit

Summary: Pull Request resolved: https://github.com/facebookresearch/d2go/pull/218

Reviewed By: newstzpz

Differential Revision: D35084145

fbshipit-source-id: f655b1a1cfa813f99e51f2d4527a6e480ef8608d
parent 9584b934
......@@ -251,9 +251,11 @@ def adamw(cfg, model: torch.nn.Module) -> torch.optim.Optimizer:
Build an optimizer from config.
"""
params = get_optimizer_param_groups(model, cfg)
return maybe_add_gradient_clipping(cfg, torch.optim.AdamW)(
params, cfg.SOLVER.BASE_LR
optim = maybe_add_gradient_clipping(cfg, torch.optim.AdamW)(
params, cfg.SOLVER.BASE_LR, betas=cfg.SOLVER.BETAS
)
return optim
@D2GO_OPTIM_MAPPER_REGISTRY.register()
......
......@@ -70,6 +70,9 @@ def get_default_cfg(_C):
_C.SOLVER.LR_MULTIPLIER_OVERWRITE = []
_C.SOLVER.WEIGHT_DECAY_EMBED = 0.0
# Betas are used in the AdamW optimizer
_C.SOLVER.BETAS = (0.9, 0.999)
# RECOMPUTE_BOXES for LSJ Training
_C.INPUT.RECOMPUTE_BOXES = False
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment