Commit ee708859 authored by A. Unique TensorFlower's avatar A. Unique TensorFlower
Browse files

Internal change

PiperOrigin-RevId: 453487987
parent 889dc12a
...@@ -71,13 +71,15 @@ def create_optimizer(init_lr, ...@@ -71,13 +71,15 @@ def create_optimizer(init_lr,
num_warmup_steps, num_warmup_steps,
end_lr=0.0, end_lr=0.0,
optimizer_type='adamw', optimizer_type='adamw',
beta_1=0.9): beta_1=0.9,
poly_power=1.0):
"""Creates an optimizer with learning rate schedule.""" """Creates an optimizer with learning rate schedule."""
# Implements linear decay of the learning rate. # Implements linear decay of the learning rate.
lr_schedule = tf.keras.optimizers.schedules.PolynomialDecay( lr_schedule = tf.keras.optimizers.schedules.PolynomialDecay(
initial_learning_rate=init_lr, initial_learning_rate=init_lr,
decay_steps=num_train_steps, decay_steps=num_train_steps,
end_learning_rate=end_lr) end_learning_rate=end_lr,
power=poly_power)
if num_warmup_steps: if num_warmup_steps:
lr_schedule = WarmUp( lr_schedule = WarmUp(
initial_learning_rate=init_lr, initial_learning_rate=init_lr,
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment