Commit b363df84 authored by Rebecca Chen, committed by A. Unique TensorFlower

Internal change

PiperOrigin-RevId: 376298243
parent 1a21d1d3
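The commit message says only "Internal change", but the diff applies a single pattern across every file: parameters annotated with a concrete type yet defaulting to `None` are rewritten with an explicit `Optional[...]`, as PEP 484 requires, and `Optional` is added to each file's `typing` import. A minimal sketch of the before/after pattern (the function and parameter names here are hypothetical, for illustration only):

```python
from typing import Optional


# Before: the annotation claims `name` is always a str, but the default is
# None, so the signature is an "implicit Optional", disallowed by PEP 484.
def build_layer_old(name: str = None):
  return name or 'layer'


# After: Optional[str] states explicitly that None is an accepted value.
def build_layer_new(name: Optional[str] = None):
  return name or 'layer'
```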
```diff
@@ -38,7 +38,10 @@ class Task(tf.Module, metaclass=abc.ABCMeta):
   # Special keys in train/validate step returned logs.
   loss = "loss"
 
-  def __init__(self, params, logging_dir: str = None, name: str = None):
+  def __init__(self,
+               params,
+               logging_dir: Optional[str] = None,
+               name: Optional[str] = None):
     """Task initialization.
 
     Args:
```
```diff
@@ -14,7 +14,7 @@
 
 """Exponential moving average optimizer."""
 
-from typing import Text, List
+from typing import List, Optional, Text
 
 import tensorflow as tf
 
@@ -106,7 +106,7 @@ class ExponentialMovingAverage(tf.keras.optimizers.Optimizer):
   def _create_slots(self, var_list):
     self._optimizer._create_slots(var_list=var_list)  # pylint: disable=protected-access
 
-  def apply_gradients(self, grads_and_vars, name: Text = None):
+  def apply_gradients(self, grads_and_vars, name: Optional[Text] = None):
     result = self._optimizer.apply_gradients(grads_and_vars, name)
     self.update_average(self.iterations)
     return result
```
```diff
@@ -13,7 +13,7 @@
 # limitations under the License.
 """Optimizer factory class."""
 
-from typing import Callable, Union
+from typing import Callable, Optional, Union
 
 import gin
 import tensorflow as tf
@@ -134,8 +134,8 @@ class OptimizerFactory:
   def build_optimizer(
       self,
       lr: Union[tf.keras.optimizers.schedules.LearningRateSchedule, float],
-      postprocessor: Callable[[tf.keras.optimizers.Optimizer],
-                              tf.keras.optimizers.Optimizer] = None):
+      postprocessor: Optional[Callable[[tf.keras.optimizers.Optimizer],
+                                       tf.keras.optimizers.Optimizer]] = None):
     """Build optimizer.
 
     Builds optimizer from config. It takes learning rate as input, and builds
```
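For compound annotations such as `postprocessor` above, note that `Optional[...]` must wrap the whole `Callable[...]` type, not one of its pieces, so that the annotation matches the `None` default. A short usage sketch under that signature; the helper name `apply_postprocessor` is hypothetical:

```python
from typing import Callable, Optional

import tensorflow as tf


def apply_postprocessor(
    optimizer: tf.keras.optimizers.Optimizer,
    postprocessor: Optional[Callable[[tf.keras.optimizers.Optimizer],
                                     tf.keras.optimizers.Optimizer]] = None
) -> tf.keras.optimizers.Optimizer:
  """Returns the optimizer, transformed by `postprocessor` if one is given."""
  if postprocessor is None:
    # The None default is why the annotation needs the Optional wrapper.
    return optimizer
  return postprocessor(optimizer)
```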
```diff
@@ -15,7 +15,7 @@
 """XLNet models."""
 # pylint: disable=g-classes-have-attributes
 
-from typing import Any, Mapping, Union
+from typing import Any, Mapping, Optional, Union
 
 import tensorflow as tf
 
@@ -99,7 +99,7 @@ class XLNetPretrainer(tf.keras.Model):
       network: Union[tf.keras.layers.Layer, tf.keras.Model],
       mlm_activation=None,
       mlm_initializer='glorot_uniform',
-      name: str = None,
+      name: Optional[str] = None,
       **kwargs):
     super().__init__(name=name, **kwargs)
     self._config = {
```
```diff
@@ -36,7 +36,7 @@ class RetinaNetHead(tf.keras.layers.Layer):
                num_anchors_per_location: int,
                num_convs: int = 4,
                num_filters: int = 256,
-               attribute_heads: List[Dict[str, Any]] = None,
+               attribute_heads: Optional[List[Dict[str, Any]]] = None,
               use_separable_conv: bool = False,
               activation: str = 'relu',
               use_sync_bn: bool = False,
```
```diff
@@ -593,7 +593,7 @@ class MultilevelDetectionGenerator(tf.keras.layers.Layer):
                raw_scores: Mapping[str, tf.Tensor],
                anchor_boxes: tf.Tensor,
                image_shape: tf.Tensor,
-               raw_attributes: Mapping[str, tf.Tensor] = None):
+               raw_attributes: Optional[Mapping[str, tf.Tensor]] = None):
     """Generates final detections.
 
     Args:
```
```diff
@@ -411,7 +411,7 @@ class _ApplyEdgeWeight(layers.Layer):
 
   def __init__(self,
                weights_shape,
-               index: int = None,
+               index: Optional[int] = None,
                use_5d_mode: bool = False,
                model_edge_weights: Optional[List[Any]] = None,
                **kwargs):
@@ -471,7 +471,7 @@ class _ApplyEdgeWeight(layers.Layer):
   def call(self,
            inputs: List[tf.Tensor],
-           training: bool = None) -> Mapping[Any, List[tf.Tensor]]:
+           training: Optional[bool] = None) -> Mapping[Any, List[tf.Tensor]]:
     use_5d_mode = self._use_5d_mode
     dtype = inputs[0].dtype
 
     assert len(inputs) > 1
@@ -517,7 +517,7 @@ class _ApplyEdgeWeight(layers.Layer):
 
 def multi_connection_fusion(inputs: List[tf.Tensor],
-                            index: int = None,
+                            index: Optional[int] = None,
                             use_5d_mode: bool = False,
                             model_edge_weights: Optional[List[Any]] = None):
   """Do weighted summation of multiple different sized tensors.
 
@@ -893,7 +893,8 @@ class AssembleNetModel(tf.keras.Model):
                num_classes,
                num_frames: int,
                model_structure: List[Any],
-               input_specs: Mapping[str, tf.keras.layers.InputSpec] = None,
+               input_specs: Optional[Mapping[str,
+                                             tf.keras.layers.InputSpec]] = None,
                max_pool_preditions: bool = False,
                **kwargs):
     if not input_specs:
@@ -1018,7 +1019,8 @@ def build_assemblenet_v1(
     input_specs: tf.keras.layers.InputSpec,
     backbone_config: hyperparams.Config,
     norm_activation_config: hyperparams.Config,
-    l2_regularizer: tf.keras.regularizers.Regularizer = None) -> tf.keras.Model:
+    l2_regularizer: Optional[tf.keras.regularizers.Regularizer] = None
+) -> tf.keras.Model:
   """Builds assemblenet backbone."""
 
   del l2_regularizer
@@ -1058,7 +1060,7 @@ def build_assemblenet_model(
     input_specs: tf.keras.layers.InputSpec,
     model_config: cfg.AssembleNetModel,
     num_classes: int,
-    l2_regularizer: tf.keras.regularizers.Regularizer = None):
+    l2_regularizer: Optional[tf.keras.regularizers.Regularizer] = None):
   """Builds assemblenet model."""
   input_specs_dict = {'image': input_specs}
   backbone = build_assemblenet_v1(input_specs, model_config.backbone,
```
```diff
@@ -16,7 +16,7 @@
 Reference: https://arxiv.org/pdf/2103.11511.pdf
 """
 
-from typing import Mapping
+from typing import Mapping, Optional
 
 from absl import logging
 import tensorflow as tf
@@ -31,14 +31,15 @@ from official.vision.beta.projects.movinet.modeling import movinet_layers
 
 class MovinetClassifier(tf.keras.Model):
   """A video classification class builder."""
 
-  def __init__(self,
-               backbone: tf.keras.Model,
-               num_classes: int,
-               input_specs: Mapping[str, tf.keras.layers.InputSpec] = None,
-               dropout_rate: float = 0.0,
-               kernel_initializer: str = 'HeNormal',
-               kernel_regularizer: tf.keras.regularizers.Regularizer = None,
-               bias_regularizer: tf.keras.regularizers.Regularizer = None,
-               output_states: bool = False,
-               **kwargs):
+  def __init__(
+      self,
+      backbone: tf.keras.Model,
+      num_classes: int,
+      input_specs: Optional[Mapping[str, tf.keras.layers.InputSpec]] = None,
+      dropout_rate: float = 0.0,
+      kernel_initializer: str = 'HeNormal',
+      kernel_regularizer: Optional[tf.keras.regularizers.Regularizer] = None,
+      bias_regularizer: Optional[tf.keras.regularizers.Regularizer] = None,
+      output_states: bool = False,
+      **kwargs):
     """Movinet initialization function.
@@ -144,7 +145,7 @@ def build_movinet_model(
     input_specs: tf.keras.layers.InputSpec,
     model_config: cfg.MovinetModel,
     num_classes: int,
-    l2_regularizer: tf.keras.regularizers.Regularizer = None):
+    l2_regularizer: Optional[tf.keras.regularizers.Regularizer] = None):
   """Builds movinet model."""
   logging.info('Building movinet model with num classes: %s', num_classes)
   if l2_regularizer is not None:
```
```diff
@@ -322,19 +322,19 @@ class DistributedExecutor(object):
 
     return test_step
 
-  def train(self,
-            train_input_fn: Callable[[params_dict.ParamsDict], tf.data.Dataset],
-            eval_input_fn: Callable[[params_dict.ParamsDict],
-                                    tf.data.Dataset] = None,
-            model_dir: Text = None,
-            total_steps: int = 1,
-            iterations_per_loop: int = 1,
-            train_metric_fn: Callable[[], Any] = None,
-            eval_metric_fn: Callable[[], Any] = None,
-            summary_writer_fn: Callable[[Text, Text],
-                                        SummaryWriter] = SummaryWriter,
-            init_checkpoint: Callable[[tf.keras.Model], Any] = None,
-            custom_callbacks: List[tf.keras.callbacks.Callback] = None,
-            continuous_eval: bool = False,
-            save_config: bool = True):
+  def train(
+      self,
+      train_input_fn: Callable[[params_dict.ParamsDict], tf.data.Dataset],
+      eval_input_fn: Optional[Callable[[params_dict.ParamsDict],
+                                       tf.data.Dataset]] = None,
+      model_dir: Optional[Text] = None,
+      total_steps: int = 1,
+      iterations_per_loop: int = 1,
+      train_metric_fn: Optional[Callable[[], Any]] = None,
+      eval_metric_fn: Optional[Callable[[], Any]] = None,
+      summary_writer_fn: Callable[[Text, Text], SummaryWriter] = SummaryWriter,
+      init_checkpoint: Optional[Callable[[tf.keras.Model], Any]] = None,
+      custom_callbacks: Optional[List[tf.keras.callbacks.Callback]] = None,
+      continuous_eval: bool = False,
+      save_config: bool = True):
     """Runs distributed training.
@@ -590,7 +590,7 @@ class DistributedExecutor(object):
       eval_input_fn: Callable[[params_dict.ParamsDict], tf.data.Dataset],
       eval_metric_fn: Callable[[], Any],
       total_steps: int = -1,
-      eval_timeout: int = None,
+      eval_timeout: Optional[int] = None,
      min_eval_interval: int = 180,
       summary_writer_fn: Callable[[Text, Text], SummaryWriter] = SummaryWriter):
     """Runs distributed evaluation on model folder.
@@ -646,7 +646,7 @@ class DistributedExecutor(object):
       eval_input_fn: Callable[[params_dict.ParamsDict],
                               tf.data.Dataset],
       eval_metric_fn: Callable[[], Any],
-      summary_writer: SummaryWriter = None):
+      summary_writer: Optional[SummaryWriter] = None):
     """Runs distributed evaluation on the one checkpoint.
 
     Args:
```
```diff
@@ -160,9 +160,9 @@ def conv2d_block(inputs: tf.Tensor,
                  strides: Any = (1, 1),
                  use_batch_norm: bool = True,
                  use_bias: bool = False,
-                 activation: Any = None,
+                 activation: Optional[Any] = None,
                  depthwise: bool = False,
-                 name: Text = None):
+                 name: Optional[Text] = None):
   """A conv2d followed by batch norm and an activation."""
   batch_norm = common_modules.get_batch_norm(config.batch_norm)
   bn_momentum = config.bn_momentum
@@ -212,7 +212,7 @@ def conv2d_block(inputs: tf.Tensor,
 def mb_conv_block(inputs: tf.Tensor,
                   block: BlockConfig,
                   config: ModelConfig,
-                  prefix: Text = None):
+                  prefix: Optional[Text] = None):
   """Mobile Inverted Residual Bottleneck.
 
   Args:
@@ -432,8 +432,8 @@ class EfficientNet(tf.keras.Model):
   """
 
   def __init__(self,
-               config: ModelConfig = None,
-               overrides: Dict[Text, Any] = None):
+               config: Optional[ModelConfig] = None,
+               overrides: Optional[Dict[Text, Any]] = None):
     """Create an EfficientNet model.
 
     Args:
@@ -463,9 +463,9 @@ class EfficientNet(tf.keras.Model):
   @classmethod
   def from_name(cls,
                 model_name: Text,
-                model_weights_path: Text = None,
+                model_weights_path: Optional[Text] = None,
                 weights_format: Text = 'saved_model',
-                overrides: Dict[Text, Any] = None):
+                overrides: Optional[Dict[Text, Any]] = None):
     """Construct an EfficientNet model from a predefined model name.
 
     E.g., `EfficientNet.from_name('efficientnet-b0')`.
```
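None of these rewrites changes runtime behavior: Python does not enforce annotations, so the edits exist purely to satisfy static type checkers, which reject implicit Optional (mypy, for example, does so by default in recent releases, or with `--no-implicit-optional` in older ones). A minimal sketch of what gets flagged versus accepted, with hypothetical function names:

```python
from typing import Optional, Text


# Rejected by a strict checker: the None default contradicts the declared
# Text (an alias of str) annotation.
def conv_block_old(name: Text = None):
  return name


# Accepted: the annotation and the default agree.
def conv_block_new(name: Optional[Text] = None):
  return name
```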