ModelZoo / ResNet50_tensorflow · Commits

Commit 3e45d52f
Authored Mar 06, 2021 by Hongkun Yu, committed by A. Unique TensorFlower on Mar 06, 2021
Clean up unnecessary code in PY3
PiperOrigin-RevId: 361348924
parent ebac9847
Showing 3 changed files with 7 additions and 7 deletions (+7 −7):

official/modeling/optimization/ema_optimizer.py        +1 −1
official/modeling/optimization/lr_schedule.py          +5 −5
official/modeling/optimization/optimizer_factory.py    +1 −1
official/modeling/optimization/ema_optimizer.py (view file @ 3e45d52f)
@@ -70,7 +70,7 @@ class ExponentialMovingAverage(tf.keras.optimizers.Optimizer):
       **kwargs: keyword arguments. Allowed to be {`clipnorm`,
         `clipvalue`, `lr`, `decay`}.
     """
-    super(ExponentialMovingAverage, self).__init__(name, **kwargs)
+    super().__init__(name, **kwargs)
     self._average_decay = average_decay
     self._start_step = tf.constant(start_step, tf.float32)
     self._dynamic_decay = dynamic_decay
official/modeling/optimization/lr_schedule.py (view file @ 3e45d52f)
@@ -44,7 +44,7 @@ class LinearWarmup(tf.keras.optimizers.schedules.LearningRateSchedule):
       warmup_learning_rate: Initial learning rate for the warmup.
       name: Optional, name of warmup schedule.
     """
-    super(LinearWarmup, self).__init__()
+    super().__init__()
     self._name = name
     self._after_warmup_lr_sched = after_warmup_lr_sched
     self._warmup_steps = warmup_steps
@@ -101,7 +101,7 @@ class PolynomialWarmUp(tf.keras.optimizers.schedules.LearningRateSchedule):
                warmup_steps: int,
                power: float = 1.0,
                name: str = "PolynomialWarmup"):
-    super(PolynomialWarmUp, self).__init__()
+    super().__init__()
     if isinstance(after_warmup_lr_sched,
                   tf.keras.optimizers.schedules.LearningRateSchedule):
       self._initial_learning_rate = after_warmup_lr_sched(warmup_steps)
@@ -174,7 +174,7 @@ class DirectPowerDecay(tf.keras.optimizers.schedules.LearningRateSchedule):
       power: The order of the polynomial.
       name: Optional, name of warmup schedule.
     """
-    super(DirectPowerDecay, self).__init__()
+    super().__init__()
     self._initial_learning_rate = initial_learning_rate
     self._power = power
     self._name = name
@@ -222,7 +222,7 @@ class PowerAndLinearDecay(tf.keras.optimizers.schedules.LearningRateSchedule):
         the learning rate will be multiplied by a linear decay.
       name: Optional, name of warmup schedule.
     """
-    super(PowerAndLinearDecay, self).__init__()
+    super().__init__()
     self._initial_learning_rate = initial_learning_rate
     self._total_decay_steps = total_decay_steps
     self._power = power
@@ -276,7 +276,7 @@ class PowerDecayWithOffset(tf.keras.optimizers.schedules.LearningRateSchedule):
       pre_offset_learning_rate: The maximum learning rate we'll use.
       name: Optional, name of warmup schedule.
     """
-    super(PowerDecayWithOffset, self).__init__()
+    super().__init__()
     self._initial_learning_rate = initial_learning_rate
     self._power = power
     self._offset = offset
official/modeling/optimization/optimizer_factory.py (view file @ 3e45d52f)
@@ -49,7 +49,7 @@ WARMUP_CLS = {
 }
 
 
-class OptimizerFactory(object):
+class OptimizerFactory:
   """Optimizer factory class.
 
   This class builds learning rate and optimizer based on an optimization config.
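
Every hunk above applies the same Python 3 cleanup named in the commit message: the zero-argument super() call replaces the explicit super(ClassName, self) form, and classes no longer list object as an explicit base. A minimal before/after sketch of that pattern follows; the LegacyStyle and ModernStyle names are illustrative only and do not appear in the repository.

class LegacyStyle(object):  # Python 2-compatible spelling: explicit object base

  def __init__(self, name, **kwargs):
    # The class and the instance are named explicitly; redundant in Python 3.
    super(LegacyStyle, self).__init__(**kwargs)
    self._name = name


class ModernStyle:  # Python 3: the object base is implicit

  def __init__(self, name, **kwargs):
    # Zero-argument super() resolves the same MRO automatically.
    super().__init__(**kwargs)
    self._name = name


assert LegacyStyle("ema")._name == ModernStyle("ema")._name == "ema"

In Python 3 the two spellings are equivalent at runtime, so the learning-rate schedules and the optimizer factory behave exactly as before; only the redundant syntax is removed.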