Commit 3e45d52f authored by Hongkun Yu, committed by A. Unique TensorFlower

Clean up unnecessary code in PY3

PiperOrigin-RevId: 361348924
parent ebac9847
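
Every hunk below applies the same Python 3 cleanup: the zero-argument super() call replaces the legacy super(ClassName, self) spelling, and the explicit (object) base class is dropped, since every Python 3 class already inherits from object implicitly. A minimal sketch of the equivalent spellings (the class names here are illustrative, not taken from the repo):

class Base:  # PY3: "class Base(object):" is redundant; object is the implicit base
    def __init__(self, name):
        self.name = name

class Derived(Base):
    def __init__(self, name, steps):
        # Zero-argument form; behaves identically to the PY2-compatible
        # super(Derived, self).__init__(name)
        super().__init__(name)
        self.steps = steps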
@@ -70,7 +70,7 @@ class ExponentialMovingAverage(tf.keras.optimizers.Optimizer):
**kwargs: keyword arguments. Allowed to be {`clipnorm`,
`clipvalue`, `lr`, `decay`}.
"""
-    super(ExponentialMovingAverage, self).__init__(name, **kwargs)
+    super().__init__(name, **kwargs)
self._average_decay = average_decay
self._start_step = tf.constant(start_step, tf.float32)
self._dynamic_decay = dynamic_decay
@@ -44,7 +44,7 @@ class LinearWarmup(tf.keras.optimizers.schedules.LearningRateSchedule):
warmup_learning_rate: Initial learning rate for the warmup.
name: Optional, name of warmup schedule.
"""
-    super(LinearWarmup, self).__init__()
+    super().__init__()
self._name = name
self._after_warmup_lr_sched = after_warmup_lr_sched
self._warmup_steps = warmup_steps
@@ -101,7 +101,7 @@ class PolynomialWarmUp(tf.keras.optimizers.schedules.LearningRateSchedule):
warmup_steps: int,
power: float = 1.0,
name: str = "PolynomialWarmup"):
-    super(PolynomialWarmUp, self).__init__()
+    super().__init__()
if isinstance(after_warmup_lr_sched,
tf.keras.optimizers.schedules.LearningRateSchedule):
self._initial_learning_rate = after_warmup_lr_sched(warmup_steps)
@@ -174,7 +174,7 @@ class DirectPowerDecay(tf.keras.optimizers.schedules.LearningRateSchedule):
power: The order of the polynomial.
name: Optional, name of warmup schedule.
"""
-    super(DirectPowerDecay, self).__init__()
+    super().__init__()
self._initial_learning_rate = initial_learning_rate
self._power = power
self._name = name
@@ -222,7 +222,7 @@ class PowerAndLinearDecay(tf.keras.optimizers.schedules.LearningRateSchedule):
the learning rate will be multiplied by a linear decay.
name: Optional, name of warmup schedule.
"""
-    super(PowerAndLinearDecay, self).__init__()
+    super().__init__()
self._initial_learning_rate = initial_learning_rate
self._total_decay_steps = total_decay_steps
self._power = power
@@ -276,7 +276,7 @@ class PowerDecayWithOffset(tf.keras.optimizers.schedules.LearningRateSchedule):
pre_offset_learning_rate: The maximum learning rate we'll use.
name: Optional, name of warmup schedule.
"""
-    super(PowerDecayWithOffset, self).__init__()
+    super().__init__()
self._initial_learning_rate = initial_learning_rate
self._power = power
self._offset = offset
@@ -49,7 +49,7 @@ WARMUP_CLS = {
}
-class OptimizerFactory(object):
+class OptimizerFactory:
"""Optimizer factory class.
This class builds learning rate and optimizer based on an optimization config.