Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
ModelZoo
ResNet50_tensorflow
Commits
2fc25efc
Commit
2fc25efc
authored
Jan 26, 2022
by
Chen Qian
Committed by
A. Unique TensorFlower
Jan 26, 2022
Browse files
Internal change
PiperOrigin-RevId: 424391275
parent
57111ba9
Changes
3
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
29 additions
and
4 deletions
+29
-4
official/modeling/optimization/configs/optimization_config.py
...cial/modeling/optimization/configs/optimization_config.py
+2
-0
official/modeling/optimization/configs/optimizer_config.py
official/modeling/optimization/configs/optimizer_config.py
+20
-0
official/modeling/optimization/optimizer_factory.py
official/modeling/optimization/optimizer_factory.py
+7
-4
No files found.
official/modeling/optimization/configs/optimization_config.py
View file @
2fc25efc
...
@@ -45,6 +45,8 @@ class OptimizerConfig(oneof.OneOfConfig):
...
@@ -45,6 +45,8 @@ class OptimizerConfig(oneof.OneOfConfig):
"""
"""
type
:
Optional
[
str
]
=
None
type
:
Optional
[
str
]
=
None
sgd
:
opt_cfg
.
SGDConfig
=
opt_cfg
.
SGDConfig
()
sgd
:
opt_cfg
.
SGDConfig
=
opt_cfg
.
SGDConfig
()
sgd_experimental
:
opt_cfg
.
SGDExperimentalConfig
=
(
opt_cfg
.
SGDExperimentalConfig
())
adam
:
opt_cfg
.
AdamConfig
=
opt_cfg
.
AdamConfig
()
adam
:
opt_cfg
.
AdamConfig
=
opt_cfg
.
AdamConfig
()
adamw
:
opt_cfg
.
AdamWeightDecayConfig
=
opt_cfg
.
AdamWeightDecayConfig
()
adamw
:
opt_cfg
.
AdamWeightDecayConfig
=
opt_cfg
.
AdamWeightDecayConfig
()
lamb
:
opt_cfg
.
LAMBConfig
=
opt_cfg
.
LAMBConfig
()
lamb
:
opt_cfg
.
LAMBConfig
=
opt_cfg
.
LAMBConfig
()
...
...
official/modeling/optimization/configs/optimizer_config.py
View file @
2fc25efc
...
@@ -54,6 +54,26 @@ class SGDConfig(BaseOptimizerConfig):
...
@@ -54,6 +54,26 @@ class SGDConfig(BaseOptimizerConfig):
momentum
:
float
=
0.0
momentum
:
float
=
0.0
# TODO(b/216129465): Merge this config with SGDConfig after the experimental
# optimizer graduates.
@dataclasses.dataclass
class SGDExperimentalConfig(BaseOptimizerConfig):
  """Configuration for the experimental SGD optimizer.

  The attributes of this class match the arguments of
  `tf.keras.optimizers.experimental.SGD`.

  Attributes:
    name: name of the optimizer.
    nesterov: whether to apply Nesterov momentum.
    momentum: momentum for the SGD optimizer.
    jit_compile: if True, jit-compile the optimizer's update step with XLA.
  """
  name: str = "SGD"
  nesterov: bool = False
  momentum: float = 0.0
  jit_compile: bool = False
@
dataclasses
.
dataclass
@
dataclasses
.
dataclass
class
RMSPropConfig
(
BaseOptimizerConfig
):
class
RMSPropConfig
(
BaseOptimizerConfig
):
"""Configuration for RMSProp optimizer.
"""Configuration for RMSProp optimizer.
...
...
official/modeling/optimization/optimizer_factory.py
View file @
2fc25efc
...
@@ -18,7 +18,6 @@ from typing import Callable, Optional, Union, List, Tuple
...
@@ -18,7 +18,6 @@ from typing import Callable, Optional, Union, List, Tuple
import
gin
import
gin
import
tensorflow
as
tf
import
tensorflow
as
tf
import
tensorflow_addons.optimizers
as
tfa_optimizers
import
tensorflow_addons.optimizers
as
tfa_optimizers
from
official.modeling.optimization
import
slide_optimizer
from
official.modeling.optimization
import
slide_optimizer
from
official.modeling.optimization
import
adafactor_optimizer
from
official.modeling.optimization
import
adafactor_optimizer
from
official.modeling.optimization
import
ema_optimizer
from
official.modeling.optimization
import
ema_optimizer
...
@@ -29,6 +28,7 @@ from official.nlp import optimization as nlp_optimization
...
@@ -29,6 +28,7 @@ from official.nlp import optimization as nlp_optimization
OPTIMIZERS_CLS
=
{
OPTIMIZERS_CLS
=
{
'sgd'
:
tf
.
keras
.
optimizers
.
SGD
,
'sgd'
:
tf
.
keras
.
optimizers
.
SGD
,
'sgd_experimental'
:
tf
.
keras
.
optimizers
.
experimental
.
SGD
,
'adam'
:
tf
.
keras
.
optimizers
.
Adam
,
'adam'
:
tf
.
keras
.
optimizers
.
Adam
,
'adamw'
:
nlp_optimization
.
AdamWeightDecay
,
'adamw'
:
nlp_optimization
.
AdamWeightDecay
,
'lamb'
:
tfa_optimizers
.
LAMB
,
'lamb'
:
tfa_optimizers
.
LAMB
,
...
@@ -178,7 +178,8 @@ class OptimizerFactory:
...
@@ -178,7 +178,8 @@ class OptimizerFactory:
takes an optimizer and returns an optimizer.
takes an optimizer and returns an optimizer.
Returns:
Returns:
tf.keras.optimizers.Optimizer instance.
`tf.keras.optimizers.Optimizer` or
`tf.keras.optimizers.experimental.Optimizer` instance.
"""
"""
optimizer_dict
=
self
.
_optimizer_config
.
as_dict
()
optimizer_dict
=
self
.
_optimizer_config
.
as_dict
()
...
@@ -201,8 +202,10 @@ class OptimizerFactory:
...
@@ -201,8 +202,10 @@ class OptimizerFactory:
optimizer
,
**
self
.
_ema_config
.
as_dict
())
optimizer
,
**
self
.
_ema_config
.
as_dict
())
if
postprocessor
:
if
postprocessor
:
optimizer
=
postprocessor
(
optimizer
)
optimizer
=
postprocessor
(
optimizer
)
assert
isinstance
(
optimizer
,
tf
.
keras
.
optimizers
.
Optimizer
),
(
assert
isinstance
(
'OptimizerFactory.build_optimizer returning a non-optimizer object: '
optimizer
,
(
tf
.
keras
.
optimizers
.
Optimizer
,
tf
.
keras
.
optimizers
.
experimental
.
Optimizer
)
),
(
'OptimizerFactory.build_optimizer returning a non-optimizer object: '
'{}'
.
format
(
optimizer
))
'{}'
.
format
(
optimizer
))
return
optimizer
return
optimizer
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment