Commit ff52d0b3 authored by Le Hou, committed by A. Unique TensorFlower

Internal change

PiperOrigin-RevId: 359821383
parent 35979d3b
@@ -177,7 +177,9 @@ class DirectPowerDecay(tf.keras.optimizers.schedules.LearningRateSchedule):
     with tf.name_scope(self._name or "DirectPowerDecay"):
       step = tf.cast(step, tf.float32)
       learning_rate = self._initial_learning_rate
-      learning_rate *= tf.math.pow(step, self._power)
+      # A zero `step` may cause Inf. So make `step` positive.
+      step_non_zero = tf.math.maximum(step, 1.0)
+      learning_rate *= tf.math.pow(step_non_zero, self._power)
       return learning_rate

   def get_config(self):
     ...
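For context, a minimal sketch (not part of the commit) of why a zero step is a problem for a power-decay schedule: with a negative decay power, `tf.math.pow(0.0, power)` overflows to Inf, which would poison the learning rate. The `power = -0.5` value below is an illustrative assumption; the clamp mirrors the change above.

    import tensorflow as tf

    step = tf.constant(0.0)
    power = -0.5  # hypothetical negative decay power

    # 0 raised to a negative power overflows to Inf.
    print(tf.math.pow(step, power))  # tf.Tensor(inf, shape=(), dtype=float32)

    # Clamping the step to at least 1.0, as the commit does, keeps the result finite.
    step_non_zero = tf.math.maximum(step, 1.0)
    print(tf.math.pow(step_non_zero, power))  # tf.Tensor(1.0, shape=(), dtype=float32)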