# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Dataclasses for optimization configs.

This file defines the dataclass for optimization configs
(OptimizationConfig). It also provides two helper functions,
get_optimizer_config and get_lr_config, which retrieve the optimizer and
learning rate configs from an OptimizationConfig instance.
"""
import dataclasses
from typing import Optional

from official.modeling.hyperparams import base_config
from official.modeling.hyperparams import oneof
from official.modeling.optimization.configs import learning_rate_config as lr_cfg
from official.modeling.optimization.configs import optimizer_config as opt_cfg


@dataclasses.dataclass
class OptimizerConfig(oneof.OneOfConfig):
  """Configuration for optimizer.

  Attributes:
    type: 'str', type of optimizer to be used, one of the fields below.
    sgd: sgd optimizer config.
    adam: adam optimizer config.
    adamw: adam with weight decay.
    lamb: lamb optimizer.
    rmsprop: rmsprop optimizer.
    lars: lars optimizer.
    adagrad: adagrad optimizer.
    slide: slide optimizer.
    adafactor: adafactor optimizer.
  """
  type: Optional[str] = None
  sgd: opt_cfg.SGDConfig = opt_cfg.SGDConfig()
  adam: opt_cfg.AdamConfig = opt_cfg.AdamConfig()
  adamw: opt_cfg.AdamWeightDecayConfig = opt_cfg.AdamWeightDecayConfig()
  lamb: opt_cfg.LAMBConfig = opt_cfg.LAMBConfig()
  rmsprop: opt_cfg.RMSPropConfig = opt_cfg.RMSPropConfig()
  lars: opt_cfg.LARSConfig = opt_cfg.LARSConfig()
  adagrad: opt_cfg.AdagradConfig = opt_cfg.AdagradConfig()
  slide: opt_cfg.SLIDEConfig = opt_cfg.SLIDEConfig()
  adafactor: opt_cfg.AdafactorConfig = opt_cfg.AdafactorConfig()

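# A minimal usage sketch (illustrative, not part of this module's API): the
# `type` field names which sub-config is active, and `OneOfConfig.get()` is
# assumed to return that selected sub-config, e.g.
#
#   config = OptimizerConfig(type='sgd')
#   config.get()  # -> opt_cfg.SGDConfig()
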

@dataclasses.dataclass
class LrConfig(oneof.OneOfConfig):
  """Configuration for lr schedule.

  Attributes:
    type: 'str', type of lr schedule to be used, one of the fields below.
    constant: constant learning rate config.
    stepwise: stepwise learning rate config.
    exponential: exponential learning rate config.
    polynomial: polynomial learning rate config.
    cosine: cosine learning rate config.
    power: step^power learning rate config.
    power_linear: learning rate config of step^power followed by
      step^power*linear.
    power_with_offset: power decay with a step offset.
    step_cosine_with_offset: step cosine with a step offset.
  """
  type: Optional[str] = None
  constant: lr_cfg.ConstantLrConfig = lr_cfg.ConstantLrConfig()
  stepwise: lr_cfg.StepwiseLrConfig = lr_cfg.StepwiseLrConfig()
  exponential: lr_cfg.ExponentialLrConfig = lr_cfg.ExponentialLrConfig()
  polynomial: lr_cfg.PolynomialLrConfig = lr_cfg.PolynomialLrConfig()
  cosine: lr_cfg.CosineLrConfig = lr_cfg.CosineLrConfig()
  power: lr_cfg.DirectPowerLrConfig = lr_cfg.DirectPowerLrConfig()
  power_linear: lr_cfg.PowerAndLinearDecayLrConfig = (
      lr_cfg.PowerAndLinearDecayLrConfig())
  power_with_offset: lr_cfg.PowerDecayWithOffsetLrConfig = (
      lr_cfg.PowerDecayWithOffsetLrConfig())
  step_cosine_with_offset: lr_cfg.StepCosineLrConfig = (
      lr_cfg.StepCosineLrConfig())

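# Sketch: picking a schedule and overriding one of its fields through the
# nested config. `override()` is assumed from the base ParamsDict machinery,
# and `decay_steps` is assumed to be a field of lr_cfg.CosineLrConfig, e.g.
#
#   lr = LrConfig(type='cosine')
#   lr.override({'cosine': {'decay_steps': 10000}})
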

@dataclasses.dataclass
class WarmupConfig(oneof.OneOfConfig):
  """Configuration for lr schedule.

  Attributes:
    type: 'str', type of warmup schedule to be used, one of the fields below.
    linear: linear warmup config.
    polynomial: polynomial warmup config.
  """
  type: Optional[str] = None
  linear: lr_cfg.LinearWarmupConfig = lr_cfg.LinearWarmupConfig()
  polynomial: lr_cfg.PolynomialWarmupConfig = lr_cfg.PolynomialWarmupConfig()

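# Sketch: a linear warmup ramps the learning rate over the first training
# steps before the main schedule takes over (`warmup_steps` is assumed to be
# a field of lr_cfg.LinearWarmupConfig), e.g.
#
#   warmup = WarmupConfig(type='linear')
#   warmup.linear.warmup_steps = 1000
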

@dataclasses.dataclass
class OptimizationConfig(base_config.Config):
  """Configuration for optimizer and learning rate schedule.

  Attributes:
    optimizer: optimizer oneof config.
    ema: optional exponential moving average optimizer config; if specified,
      an EMA optimizer will be used.
    learning_rate: learning rate oneof config.
    warmup: warmup oneof config.
  """
  optimizer: OptimizerConfig = OptimizerConfig()
  ema: Optional[opt_cfg.EMAConfig] = None
  learning_rate: LrConfig = LrConfig()
  warmup: WarmupConfig = WarmupConfig()
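

# A minimal end-to-end sketch, runnable as a script. The dict-based
# constructor and `OneOfConfig.get()` are assumed to behave as in the rest of
# the model garden config machinery; the chosen types are illustrative.
if __name__ == '__main__':
  config = OptimizationConfig({
      'optimizer': {'type': 'sgd'},
      'learning_rate': {'type': 'cosine'},
      'warmup': {'type': 'linear'},
  })
  # Each oneof resolves to the sub-config named by its `type` field.
  print(config.optimizer.get())      # opt_cfg.SGDConfig(...)
  print(config.learning_rate.get())  # lr_cfg.CosineLrConfig(...)
  print(config.warmup.get())         # lr_cfg.LinearWarmupConfig(...)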