Commit 0c9253b4 authored by Rebecca Chen's avatar Rebecca Chen Committed by A. Unique TensorFlower
Browse files

Internal change

PiperOrigin-RevId: 376298252
parent b3fa67e0
......@@ -502,7 +502,7 @@ class MobileNet(tf.keras.Model):
kernel_regularizer: Optional[tf.keras.regularizers.Regularizer] = None,
bias_regularizer: Optional[tf.keras.regularizers.Regularizer] = None,
# The followings should be kept the same most of the times.
output_stride: int = None,
output_stride: Optional[int] = None,
min_depth: int = 8,
# divisible is not used in MobileNetV1.
divisible_by: int = 8,
......@@ -768,7 +768,8 @@ def build_mobilenet(
input_specs: tf.keras.layers.InputSpec,
backbone_config: hyperparams.Config,
norm_activation_config: hyperparams.Config,
l2_regularizer: tf.keras.regularizers.Regularizer = None) -> tf.keras.Model:
l2_regularizer: Optional[tf.keras.regularizers.Regularizer] = None
) -> tf.keras.Model:
"""Builds MobileNet backbone from a config."""
backbone_type = backbone_config.type
backbone_cfg = backbone_config.get()
......
......@@ -81,7 +81,7 @@ class ResNet3D(tf.keras.Model):
model_id: int,
temporal_strides: List[int],
temporal_kernel_sizes: List[Tuple[int]],
use_self_gating: List[int] = None,
use_self_gating: Optional[List[int]] = None,
input_specs: tf.keras.layers.InputSpec = layers.InputSpec(
shape=[None, None, None, None, 3]),
stem_type: str = 'v0',
......@@ -380,7 +380,8 @@ def build_resnet3d(
input_specs: tf.keras.layers.InputSpec,
backbone_config: hyperparams.Config,
norm_activation_config: hyperparams.Config,
l2_regularizer: tf.keras.regularizers.Regularizer = None) -> tf.keras.Model:
l2_regularizer: Optional[tf.keras.regularizers.Regularizer] = None
) -> tf.keras.Model:
"""Builds ResNet 3d backbone from a config."""
backbone_cfg = backbone_config.get()
......@@ -418,7 +419,8 @@ def build_resnet3d_rs(
input_specs: tf.keras.layers.InputSpec,
backbone_config: hyperparams.Config,
norm_activation_config: hyperparams.Config,
l2_regularizer: tf.keras.regularizers.Regularizer = None) -> tf.keras.Model:
l2_regularizer: Optional[tf.keras.regularizers.Regularizer] = None
) -> tf.keras.Model:
"""Builds ResNet-3D-RS backbone from a config."""
backbone_cfg = backbone_config.get()
......
......@@ -140,10 +140,10 @@ class MaskRCNNModel(tf.keras.Model):
images: tf.Tensor,
image_shape: tf.Tensor,
anchor_boxes: Optional[Mapping[str, tf.Tensor]] = None,
gt_boxes: tf.Tensor = None,
gt_classes: tf.Tensor = None,
gt_masks: tf.Tensor = None,
training: bool = None) -> Mapping[str, tf.Tensor]:
gt_boxes: Optional[tf.Tensor] = None,
gt_classes: Optional[tf.Tensor] = None,
gt_masks: Optional[tf.Tensor] = None,
training: Optional[bool] = None) -> Mapping[str, tf.Tensor]:
model_outputs = {}
# Feature extraction.
......
......@@ -27,7 +27,7 @@ class VideoClassificationModel(tf.keras.Model):
self,
backbone: tf.keras.Model,
num_classes: int,
input_specs: Mapping[str, tf.keras.layers.InputSpec] = None,
input_specs: Optional[Mapping[str, tf.keras.layers.InputSpec]] = None,
dropout_rate: float = 0.0,
aggregate_endpoints: bool = False,
kernel_initializer: str = 'random_uniform',
......
......@@ -20,7 +20,7 @@ from __future__ import division
from __future__ import print_function
import os
from typing import Any, List, MutableMapping, Text
from typing import Any, List, MutableMapping, Optional, Text
from absl import logging
import tensorflow as tf
......@@ -39,7 +39,7 @@ def get_callbacks(
initial_step: int = 0,
batch_size: int = 0,
log_steps: int = 0,
model_dir: str = None,
model_dir: Optional[str] = None,
backup_and_restore: bool = False) -> List[tf.keras.callbacks.Callback]:
"""Get all callbacks."""
model_dir = model_dir or ''
......@@ -120,7 +120,7 @@ class CustomTensorBoard(tf.keras.callbacks.TensorBoard):
def on_batch_begin(self,
epoch: int,
logs: MutableMapping[str, Any] = None) -> None:
logs: Optional[MutableMapping[str, Any]] = None) -> None:
self.step += 1
if logs is None:
logs = {}
......@@ -129,7 +129,7 @@ class CustomTensorBoard(tf.keras.callbacks.TensorBoard):
def on_epoch_begin(self,
epoch: int,
logs: MutableMapping[str, Any] = None) -> None:
logs: Optional[MutableMapping[str, Any]] = None) -> None:
if logs is None:
logs = {}
metrics = self._calculate_metrics()
......@@ -140,7 +140,7 @@ class CustomTensorBoard(tf.keras.callbacks.TensorBoard):
def on_epoch_end(self,
epoch: int,
logs: MutableMapping[str, Any] = None) -> None:
logs: Optional[MutableMapping[str, Any]] = None) -> None:
if logs is None:
logs = {}
metrics = self._calculate_metrics()
......@@ -195,13 +195,13 @@ class MovingAverageCallback(tf.keras.callbacks.Callback):
optimization.ExponentialMovingAverage)
self.model.optimizer.shadow_copy(self.model)
def on_test_begin(self, logs: MutableMapping[Text, Any] = None):
def on_test_begin(self, logs: Optional[MutableMapping[Text, Any]] = None):
self.model.optimizer.swap_weights()
def on_test_end(self, logs: MutableMapping[Text, Any] = None):
def on_test_end(self, logs: Optional[MutableMapping[Text, Any]] = None):
self.model.optimizer.swap_weights()
def on_train_end(self, logs: MutableMapping[Text, Any] = None):
def on_train_end(self, logs: Optional[MutableMapping[Text, Any]] = None):
if self.overwrite_weights_on_train_end:
self.model.optimizer.assign_average_vars(self.model.variables)
......
......@@ -280,7 +280,9 @@ class DatasetBuilder:
raise e
return self.builder_info
def build(self, strategy: tf.distribute.Strategy = None) -> tf.data.Dataset:
def build(
self,
strategy: Optional[tf.distribute.Strategy] = None) -> tf.data.Dataset:
"""Construct a dataset end-to-end and return it using an optional strategy.
Args:
......@@ -305,7 +307,8 @@ class DatasetBuilder:
def _build(
self,
input_context: tf.distribute.InputContext = None) -> tf.data.Dataset:
input_context: Optional[tf.distribute.InputContext] = None
) -> tf.data.Dataset:
"""Construct a dataset end-to-end and return it.
Args:
......
......@@ -18,7 +18,7 @@ from __future__ import division
# from __future__ import google_type_annotations
from __future__ import print_function
from typing import Any, Dict, Text
from typing import Any, Dict, Optional, Text
from absl import logging
import tensorflow as tf
......@@ -35,7 +35,7 @@ def build_optimizer(
optimizer_name: Text,
base_learning_rate: tf.keras.optimizers.schedules.LearningRateSchedule,
params: Dict[Text, Any],
model: tf.keras.Model = None):
model: Optional[tf.keras.Model] = None):
"""Build the optimizer based on name.
Args:
......@@ -124,9 +124,9 @@ def build_optimizer(
def build_learning_rate(params: base_configs.LearningRateConfig,
batch_size: int = None,
train_epochs: int = None,
train_steps: int = None):
batch_size: Optional[int] = None,
train_epochs: Optional[int] = None,
train_steps: Optional[int] = None):
"""Build the learning rate given the provided configuration."""
decay_type = params.name
base_lr = params.initial_lr
......
......@@ -329,7 +329,7 @@ def load_eval_image(filename: Text, image_size: int = IMAGE_SIZE) -> tf.Tensor:
def build_eval_dataset(filenames: List[Text],
labels: List[int] = None,
labels: Optional[List[int]] = None,
image_size: int = IMAGE_SIZE,
batch_size: int = 1) -> tf.Tensor:
"""Builds a tf.data.Dataset from a list of filenames and labels.
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment