Commit 92bcecc9 authored by Jinoo Baek, committed by A. Unique TensorFlower

Fix typo from StepConsineDecayWithOffset -> StepCosineDecayWithOffset

PiperOrigin-RevId: 428599243
parent 4f8bb5b3
@@ -61,7 +61,7 @@ class CosineLearningRateWithLinearWarmup(
   """Class to generate learning rate tensor."""
 
   def __init__(self, total_steps, params):
-    """Creates the consine learning rate tensor with linear warmup."""
+    """Creates the cosine learning rate tensor with linear warmup."""
     super(CosineLearningRateWithLinearWarmup, self).__init__()
     self._total_steps = total_steps
     assert isinstance(params, (dict, params_dict.ParamsDict))
@@ -78,7 +78,7 @@ class CosineDecayWithWarmup(tf.keras.optimizers.schedules.LearningRateSchedule):
   """Class to generate learning rate tensor."""
 
   def __init__(self, batch_size: int, total_steps: int, warmup_steps: int):
-    """Creates the consine learning rate tensor with linear warmup.
+    """Creates the cosine learning rate tensor with linear warmup.
 
     Args:
       batch_size: The training batch size used in the experiment.
@@ -216,14 +216,14 @@ class StepCosineLrConfig(base_config.Config):
   """Configuration for stepwise learning rate decay.
 
   This class is a container for the piecewise cosine learning rate scheduling
-  configs. It will configure an instance of StepConsineDecayWithOffset keras
+  configs. It will configure an instance of StepCosineDecayWithOffset keras
   learning rate schedule.
 
   ```python
   boundaries: [100000, 110000]
   values: [1.0, 0.5]
   lr_decayed_fn = (
-    lr_schedule.StepConsineDecayWithOffset(
+    lr_schedule.StepCosineDecayWithOffset(
         boundaries,
         values))
   ```
@@ -243,7 +243,7 @@ class StepCosineLrConfig(base_config.Config):
       [boundaries[n], end] -> values[n+1] to 0.
     offset: An int. The offset applied to steps. Defaults to 0.
   """
-  name: str = 'StepConsineDecayWithOffset'
+  name: str = 'StepCosineDecayWithOffset'
   boundaries: Optional[List[int]] = None
   values: Optional[List[float]] = None
   offset: int = 0
@@ -386,11 +386,11 @@ class PowerDecayWithOffset(tf.keras.optimizers.schedules.LearningRateSchedule):
 }
 
 
-class StepConsineDecayWithOffset(
+class StepCosineDecayWithOffset(
     tf.keras.optimizers.schedules.LearningRateSchedule):
   """Stepwise cosine learning rate decay with offset.
 
-  Learning rate is equivalent to one or more consine decay(s) starting and
+  Learning rate is equivalent to one or more cosine decay(s) starting and
   ending at each interval.
 
   Example:
@@ -399,7 +399,7 @@ class StepConsineDecayWithOffset(
   boundaries: [100000, 110000]
   values: [1.0, 0.5]
   lr_decayed_fn = (
-    lr_schedule.StepConsineDecayWithOffset(
+    lr_schedule.StepCosineDecayWithOffset(
         boundaries,
         values))
   ```
@@ -412,7 +412,7 @@ class StepConsineDecayWithOffset(
                boundaries,
                values,
                offset: int = 0,
-               name: str = "StepConsineDecayWithOffset"):
+               name: str = "StepCosineDecayWithOffset"):
     """Initialize configuration of the learning rate schedule.
 
     Args:
@@ -444,7 +444,7 @@ class StepConsineDecayWithOffset(
         ] + [0])
 
   def __call__(self, global_step):
-    with tf.name_scope(self.name or "StepConsineDecayWithOffset"):
+    with tf.name_scope(self.name or "StepCosineDecayWithOffset"):
       global_step = tf.cast(global_step - self.offset, tf.float32)
       lr_levels = self.values
       lr_steps = self.boundaries
@@ -47,7 +47,7 @@ LR_CLS = {
     'power': lr_schedule.DirectPowerDecay,
     'power_linear': lr_schedule.PowerAndLinearDecay,
     'power_with_offset': lr_schedule.PowerDecayWithOffset,
-    'step_cosine_with_offset': lr_schedule.StepConsineDecayWithOffset,
+    'step_cosine_with_offset': lr_schedule.StepCosineDecayWithOffset,
 }
 
 WARMUP_CLS = {
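For reference, a minimal usage sketch of the schedule touched by this rename. It is not part of the commit: the module path `official.modeling.optimization.lr_schedule` is assumed from the Model Garden layout, and the boundaries/values reuse the docstring example shown in the diff above.

```python
# Minimal sketch, not from this commit. Assumes the Model Garden package
# path `official.modeling.optimization.lr_schedule` (an assumption; the
# extracted diff does not show file names). boundaries/values come from
# the docstring example above.
from official.modeling.optimization import lr_schedule

boundaries = [100000, 110000]
values = [1.0, 0.5]

# After this commit the class is spelled StepCosineDecayWithOffset.
lr_decayed_fn = lr_schedule.StepCosineDecayWithOffset(boundaries, values)

# The schedule is a tf.keras LearningRateSchedule, so it maps a global
# step to a learning rate. Per the docstring: values[0] cosine-decays to
# values[1] over [0, boundaries[0]], and the last value decays toward 0
# past boundaries[-1].
for step in (0, 50000, 100000, 105000, 110000):
  print(step, float(lr_decayed_fn(step)))
```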