Commit 1d81b877 authored by Chen Qian, committed by A. Unique TensorFlower

Allow legacy optimizer in MG's optimizer factory.

PiperOrigin-RevId: 467067824
parent 4cd86d27
@@ -214,17 +214,15 @@ class OptimizerFactory:
           optimizer, **self._ema_config.as_dict())
     if postprocessor:
       optimizer = postprocessor(optimizer)
-    if not isinstance(optimizer, tf.keras.optimizers.Optimizer):
-      # tf.keras.optimizers.experimental only exist in tf-nightly.
-      # The following check makes sure the function wont' break in older TF
-      # version because of missing the experimental package.
-      if hasattr(tf.keras.optimizers, 'experimental'):
-        if not isinstance(optimizer,
-                          tf.keras.optimizers.experimental.Optimizer):
-          raise TypeError('OptimizerFactory.build_optimizer returning a '
-                          'non-optimizer object: {}'.format(optimizer))
-      else:
-        raise TypeError('OptimizerFactory.build_optimizer returning a '
-                        'non-optimizer object: {}'.format(optimizer))
-    return optimizer
+    if isinstance(optimizer, tf.keras.optimizers.Optimizer):
+      return optimizer
+    # The following check makes sure the function won't break in older TF
+    # version because of missing the experimental/legacy package.
+    if hasattr(tf.keras.optimizers, 'experimental'):
+      if isinstance(optimizer, tf.keras.optimizers.experimental.Optimizer):
+        return optimizer
+    if hasattr(tf.keras.optimizers, 'legacy'):
+      if isinstance(optimizer, tf.keras.optimizers.legacy.Optimizer):
+        return optimizer
+    raise TypeError('OptimizerFactory.build_optimizer returning a '
+                    'non-optimizer object: {}'.format(optimizer))
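
With this change, build_optimizer returns the optimizer as long as it is an instance of any of the three Keras base classes (tf.keras.optimizers.Optimizer, the experimental class, or the legacy class), each guarded by hasattr so older TF releases that lack those namespaces keep working; anything else still raises TypeError. The snippet below is a minimal illustration and is not part of the commit: it assumes a TF build that ships tf.keras.optimizers.legacy (tf-nightly at the time of this change), and use_legacy_sgd is a hypothetical postprocessor-style helper made up for the example.

import tensorflow as tf

# Hypothetical postprocessor for illustration only: it ignores the optimizer
# the factory built and hands back a legacy-namespace optimizer instead.
def use_legacy_sgd(optimizer):
  del optimizer  # Unused in this sketch.
  return tf.keras.optimizers.legacy.SGD(learning_rate=0.01)

legacy_opt = use_legacy_sgd(None)

# The relaxed check added by this commit accepts such an object. Depending on
# the TF version, a legacy optimizer may or may not also satisfy the plain
# tf.keras.optimizers.Optimizer check, so the explicit legacy branch makes the
# acceptance unambiguous.
assert hasattr(tf.keras.optimizers, 'legacy')
assert isinstance(legacy_opt, tf.keras.optimizers.legacy.Optimizer)

Previously, an optimizer like this would have hit the TypeError branch whenever it did not match the base or experimental classes; now it is returned to the caller.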