Commit b3fa67e0 authored by Rebecca Chen, committed by A. Unique TensorFlower

Internal change

PiperOrigin-RevId: 376298243
parent 636fd747
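
Every hunk below applies the same fix: a parameter whose default is `None` gets an explicit `Optional[...]` annotation. PEP 484 deprecated the implicit-Optional shorthand, and strict type checkers such as pytype flag it. A minimal sketch of the pattern; the function and parameter names here are illustrative, not taken from the diff:

```python
from typing import Optional


def before(logging_dir: str = None):  # implicit Optional: flagged by strict checkers
  del logging_dir


def after(logging_dir: Optional[str] = None):  # explicit Optional: accepted
  del logging_dir
```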
@@ -38,7 +38,10 @@ class Task(tf.Module, metaclass=abc.ABCMeta):
   # Special keys in train/validate step returned logs.
   loss = "loss"

-  def __init__(self, params, logging_dir: str = None, name: str = None):
+  def __init__(self,
+               params,
+               logging_dir: Optional[str] = None,
+               name: Optional[str] = None):
     """Task initialization.

     Args:
@@ -14,7 +14,7 @@
 """Exponential moving average optimizer."""

-from typing import Text, List
+from typing import List, Optional, Text

 import tensorflow as tf
@@ -106,7 +106,7 @@ class ExponentialMovingAverage(tf.keras.optimizers.Optimizer):
   def _create_slots(self, var_list):
     self._optimizer._create_slots(var_list=var_list)  # pylint: disable=protected-access

-  def apply_gradients(self, grads_and_vars, name: Text = None):
+  def apply_gradients(self, grads_and_vars, name: Optional[Text] = None):
     result = self._optimizer.apply_gradients(grads_and_vars, name)
     self.update_average(self.iterations)
     return result
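A hedged usage sketch for the updated `apply_gradients` signature: `name` can simply be omitted, since `None` is now an explicitly valid value. Here `ema_opt`, `model`, `batch`, and `compute_loss` are assumed stand-ins, not names from this diff:

```python
import tensorflow as tf

# ema_opt is assumed to be an ExponentialMovingAverage wrapping a concrete
# optimizer; per the hunk above, it forwards grads_and_vars and the optional
# name to the inner optimizer, then updates the moving averages.
with tf.GradientTape() as tape:
  loss = compute_loss(model, batch)
grads = tape.gradient(loss, model.trainable_variables)
ema_opt.apply_gradients(zip(grads, model.trainable_variables))  # name=None
```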
@@ -13,7 +13,7 @@
 # limitations under the License.
 """Optimizer factory class."""

-from typing import Callable, Union
+from typing import Callable, Optional, Union

 import gin
 import tensorflow as tf
@@ -134,8 +134,8 @@ class OptimizerFactory:
   def build_optimizer(
       self,
       lr: Union[tf.keras.optimizers.schedules.LearningRateSchedule, float],
-      postprocessor: Callable[[tf.keras.optimizers.Optimizer],
-                              tf.keras.optimizers.Optimizer] = None):
+      postprocessor: Optional[Callable[[tf.keras.optimizers.Optimizer],
+                                       tf.keras.optimizers.Optimizer]] = None):
     """Build optimizer.

     Builds optimizer from config. It takes learning rate as input, and builds
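The `postprocessor` parameter accepts any `Optimizer -> Optimizer` callable, with `None` (now the explicit default) meaning no post-processing. A sketch under the assumption that `opt_factory` is an already-constructed `OptimizerFactory`; the clipping postprocessor is hypothetical:

```python
import tensorflow as tf


def clip_postprocessor(
    optimizer: tf.keras.optimizers.Optimizer) -> tf.keras.optimizers.Optimizer:
  # Hypothetical postprocessor: turn on global-norm gradient clipping via the
  # standard Keras optimizer `clipnorm` attribute.
  optimizer.clipnorm = 1.0
  return optimizer


optimizer = opt_factory.build_optimizer(lr=0.01, postprocessor=clip_postprocessor)
plain = opt_factory.build_optimizer(lr=0.01)  # postprocessor defaults to None
```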
@@ -15,7 +15,7 @@
 """XLNet models."""
 # pylint: disable=g-classes-have-attributes

-from typing import Any, Mapping, Union
+from typing import Any, Mapping, Optional, Union

 import tensorflow as tf
@@ -99,7 +99,7 @@ class XLNetPretrainer(tf.keras.Model):
                network: Union[tf.keras.layers.Layer, tf.keras.Model],
                mlm_activation=None,
                mlm_initializer='glorot_uniform',
-               name: str = None,
+               name: Optional[str] = None,
                **kwargs):
     super().__init__(name=name, **kwargs)
     self._config = {
@@ -36,7 +36,7 @@ class RetinaNetHead(tf.keras.layers.Layer):
                num_anchors_per_location: int,
                num_convs: int = 4,
                num_filters: int = 256,
-               attribute_heads: List[Dict[str, Any]] = None,
+               attribute_heads: Optional[List[Dict[str, Any]]] = None,
                use_separable_conv: bool = False,
                activation: str = 'relu',
                use_sync_bn: bool = False,
@@ -593,7 +593,7 @@ class MultilevelDetectionGenerator(tf.keras.layers.Layer):
                raw_scores: Mapping[str, tf.Tensor],
                anchor_boxes: tf.Tensor,
                image_shape: tf.Tensor,
-               raw_attributes: Mapping[str, tf.Tensor] = None):
+               raw_attributes: Optional[Mapping[str, tf.Tensor]] = None):
     """Generates final detections.

     Args:
@@ -411,7 +411,7 @@ class _ApplyEdgeWeight(layers.Layer):
   def __init__(self,
                weights_shape,
-               index: int = None,
+               index: Optional[int] = None,
                use_5d_mode: bool = False,
                model_edge_weights: Optional[List[Any]] = None,
                **kwargs):
@@ -471,7 +471,7 @@ class _ApplyEdgeWeight(layers.Layer):
   def call(self,
            inputs: List[tf.Tensor],
-           training: bool = None) -> Mapping[Any, List[tf.Tensor]]:
+           training: Optional[bool] = None) -> Mapping[Any, List[tf.Tensor]]:
     use_5d_mode = self._use_5d_mode
     dtype = inputs[0].dtype
     assert len(inputs) > 1
@@ -517,7 +517,7 @@ class _ApplyEdgeWeight(layers.Layer):
 def multi_connection_fusion(inputs: List[tf.Tensor],
-                            index: int = None,
+                            index: Optional[int] = None,
                             use_5d_mode: bool = False,
                             model_edge_weights: Optional[List[Any]] = None):
   """Do weighted summation of multiple different sized tensors.
@@ -893,7 +893,8 @@ class AssembleNetModel(tf.keras.Model):
                num_classes,
                num_frames: int,
                model_structure: List[Any],
-               input_specs: Mapping[str, tf.keras.layers.InputSpec] = None,
+               input_specs: Optional[Mapping[str,
+                                             tf.keras.layers.InputSpec]] = None,
                max_pool_preditions: bool = False,
                **kwargs):
     if not input_specs:
@@ -1018,7 +1019,8 @@ def build_assemblenet_v1(
     input_specs: tf.keras.layers.InputSpec,
     backbone_config: hyperparams.Config,
     norm_activation_config: hyperparams.Config,
-    l2_regularizer: tf.keras.regularizers.Regularizer = None) -> tf.keras.Model:
+    l2_regularizer: Optional[tf.keras.regularizers.Regularizer] = None
+) -> tf.keras.Model:
   """Builds assemblenet backbone."""
   del l2_regularizer
@@ -1058,7 +1060,7 @@ def build_assemblenet_model(
     input_specs: tf.keras.layers.InputSpec,
     model_config: cfg.AssembleNetModel,
     num_classes: int,
-    l2_regularizer: tf.keras.regularizers.Regularizer = None):
+    l2_regularizer: Optional[tf.keras.regularizers.Regularizer] = None):
   """Builds assemblenet model."""
   input_specs_dict = {'image': input_specs}
   backbone = build_assemblenet_v1(input_specs, model_config.backbone,
@@ -16,7 +16,7 @@
 Reference: https://arxiv.org/pdf/2103.11511.pdf
 """

-from typing import Mapping
+from typing import Mapping, Optional

 from absl import logging
 import tensorflow as tf
@@ -31,16 +31,17 @@ from official.vision.beta.projects.movinet.modeling import movinet_layers
 class MovinetClassifier(tf.keras.Model):
   """A video classification class builder."""

-  def __init__(self,
-               backbone: tf.keras.Model,
-               num_classes: int,
-               input_specs: Mapping[str, tf.keras.layers.InputSpec] = None,
-               dropout_rate: float = 0.0,
-               kernel_initializer: str = 'HeNormal',
-               kernel_regularizer: tf.keras.regularizers.Regularizer = None,
-               bias_regularizer: tf.keras.regularizers.Regularizer = None,
-               output_states: bool = False,
-               **kwargs):
+  def __init__(
+      self,
+      backbone: tf.keras.Model,
+      num_classes: int,
+      input_specs: Optional[Mapping[str, tf.keras.layers.InputSpec]] = None,
+      dropout_rate: float = 0.0,
+      kernel_initializer: str = 'HeNormal',
+      kernel_regularizer: Optional[tf.keras.regularizers.Regularizer] = None,
+      bias_regularizer: Optional[tf.keras.regularizers.Regularizer] = None,
+      output_states: bool = False,
+      **kwargs):
     """Movinet initialization function.

     Args:
@@ -144,7 +145,7 @@ def build_movinet_model(
     input_specs: tf.keras.layers.InputSpec,
     model_config: cfg.MovinetModel,
     num_classes: int,
-    l2_regularizer: tf.keras.regularizers.Regularizer = None):
+    l2_regularizer: Optional[tf.keras.regularizers.Regularizer] = None):
   """Builds movinet model."""
   logging.info('Building movinet model with num classes: %s', num_classes)
   if l2_regularizer is not None:
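With the explicit `Optional`, both a regularized and an unregularized build satisfy the annotation. A sketch assuming `input_specs`, `model_config`, and `num_classes` are in scope and that the parameters shown in the hunk are the full signature:

```python
import tensorflow as tf

# Regularized build: pass a concrete Keras regularizer.
model = build_movinet_model(
    input_specs, model_config, num_classes,
    l2_regularizer=tf.keras.regularizers.l2(1e-4))

# Unregularized build: l2_regularizer now defaults to an explicit None.
model = build_movinet_model(input_specs, model_config, num_classes)
```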
@@ -322,21 +322,21 @@ class DistributedExecutor(object):
     return test_step

-  def train(self,
-            train_input_fn: Callable[[params_dict.ParamsDict], tf.data.Dataset],
-            eval_input_fn: Callable[[params_dict.ParamsDict],
-                                    tf.data.Dataset] = None,
-            model_dir: Text = None,
-            total_steps: int = 1,
-            iterations_per_loop: int = 1,
-            train_metric_fn: Callable[[], Any] = None,
-            eval_metric_fn: Callable[[], Any] = None,
-            summary_writer_fn: Callable[[Text, Text],
-                                        SummaryWriter] = SummaryWriter,
-            init_checkpoint: Callable[[tf.keras.Model], Any] = None,
-            custom_callbacks: List[tf.keras.callbacks.Callback] = None,
-            continuous_eval: bool = False,
-            save_config: bool = True):
+  def train(
+      self,
+      train_input_fn: Callable[[params_dict.ParamsDict], tf.data.Dataset],
+      eval_input_fn: Optional[Callable[[params_dict.ParamsDict],
+                                       tf.data.Dataset]] = None,
+      model_dir: Optional[Text] = None,
+      total_steps: int = 1,
+      iterations_per_loop: int = 1,
+      train_metric_fn: Optional[Callable[[], Any]] = None,
+      eval_metric_fn: Optional[Callable[[], Any]] = None,
+      summary_writer_fn: Callable[[Text, Text], SummaryWriter] = SummaryWriter,
+      init_checkpoint: Optional[Callable[[tf.keras.Model], Any]] = None,
+      custom_callbacks: Optional[List[tf.keras.callbacks.Callback]] = None,
+      continuous_eval: bool = False,
+      save_config: bool = True):
     """Runs distributed training.

     Args:
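Since everything after `train_input_fn` is now optional or defaulted, the minimal call passes only the training input function. A hedged sketch with assumed placeholder names (`executor`, `train_fn`, `eval_fn`); the model directory is a placeholder path:

```python
# Minimal invocation: every other argument falls back to its default.
executor.train(train_input_fn=train_fn)

# Typical invocation with evaluation enabled.
executor.train(
    train_input_fn=train_fn,
    eval_input_fn=eval_fn,
    model_dir='/tmp/model_dir',  # placeholder
    total_steps=1000,
    iterations_per_loop=100)
```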
@@ -590,7 +590,7 @@ class DistributedExecutor(object):
       eval_input_fn: Callable[[params_dict.ParamsDict], tf.data.Dataset],
       eval_metric_fn: Callable[[], Any],
       total_steps: int = -1,
-      eval_timeout: int = None,
+      eval_timeout: Optional[int] = None,
       min_eval_interval: int = 180,
       summary_writer_fn: Callable[[Text, Text], SummaryWriter] = SummaryWriter):
     """Runs distributed evaluation on model folder.
@@ -646,7 +646,7 @@ class DistributedExecutor(object):
       eval_input_fn: Callable[[params_dict.ParamsDict],
                               tf.data.Dataset],
       eval_metric_fn: Callable[[], Any],
-      summary_writer: SummaryWriter = None):
+      summary_writer: Optional[SummaryWriter] = None):
     """Runs distributed evaluation on the one checkpoint.

     Args:
@@ -160,9 +160,9 @@ def conv2d_block(inputs: tf.Tensor,
                  strides: Any = (1, 1),
                  use_batch_norm: bool = True,
                  use_bias: bool = False,
-                 activation: Any = None,
+                 activation: Optional[Any] = None,
                  depthwise: bool = False,
-                 name: Text = None):
+                 name: Optional[Text] = None):
   """A conv2d followed by batch norm and an activation."""
   batch_norm = common_modules.get_batch_norm(config.batch_norm)
   bn_momentum = config.bn_momentum
@@ -212,7 +212,7 @@ def conv2d_block(inputs: tf.Tensor,
 def mb_conv_block(inputs: tf.Tensor,
                   block: BlockConfig,
                   config: ModelConfig,
-                  prefix: Text = None):
+                  prefix: Optional[Text] = None):
   """Mobile Inverted Residual Bottleneck.

   Args:
@@ -432,8 +432,8 @@ class EfficientNet(tf.keras.Model):
   """

   def __init__(self,
-               config: ModelConfig = None,
-               overrides: Dict[Text, Any] = None):
+               config: Optional[ModelConfig] = None,
+               overrides: Optional[Dict[Text, Any]] = None):
     """Create an EfficientNet model.

     Args:
@classmethod
def from_name(cls,
model_name: Text,
model_weights_path: Text = None,
model_weights_path: Optional[Text] = None,
weights_format: Text = 'saved_model',
overrides: Dict[Text, Any] = None):
overrides: Optional[Dict[Text, Any]] = None):
"""Construct an EfficientNet model from a predefined model name.
E.g., `EfficientNet.from_name('efficientnet-b0')`.
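Following the docstring's own example, with the now-explicitly-Optional arguments spelled out; the weights path and override key below are placeholders, not documented values:

```python
# From the docstring: build by predefined name; weights and overrides
# default to None.
model = EfficientNet.from_name('efficientnet-b0')

# Hypothetical: load weights in saved_model format and override a config field.
model = EfficientNet.from_name(
    'efficientnet-b0',
    model_weights_path='/path/to/weights',  # placeholder
    overrides={'num_classes': 10})          # placeholder override key
```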