Unverified Commit aef943ed authored by SunJong Park, committed by GitHub

Merge branch 'tensorflow:master' into master

parents 67ad909d 930abe21
......@@ -18,7 +18,7 @@ from absl import app
from absl import flags
from official.common import flags as tfm_flags
from official.vision.beta import train
from official.vision import train
from official.vision.beta.projects.yolo.common import registry_imports # pylint: disable=unused-import
FLAGS = flags.FLAGS
......
......@@ -112,7 +112,7 @@ def generate_coco_panoptics_masks(segments_info, mask_path,
represent "stuff" and "things" classes respectively.
Returns:
A dict with with keys: [u'semantic_segmentation_mask', u'category_mask',
A dict with keys: [u'semantic_segmentation_mask', u'category_mask',
u'instance_mask']. The dict contains 'category_mask' and 'instance_mask'
only if `include_panoptic_eval_masks` is set to True.
"""
......
......@@ -138,7 +138,7 @@ class ExampleTask(base_task.Task):
between output from Parser and input used here.
Args:
inputs: A tuple of of input tensors of (features, labels).
inputs: A tuple of input tensors of (features, labels).
model: A tf.keras.Model instance.
optimizer: The optimizer for this training step.
metrics: A nested structure of metrics objects.
......@@ -186,7 +186,7 @@ class ExampleTask(base_task.Task):
"""Runs validatation step.
Args:
inputs: A tuple of of input tensors of (features, labels).
inputs: A tuple of input tensors of (features, labels).
model: A tf.keras.Model instance.
metrics: A nested structure of metrics objects.
......
......@@ -135,25 +135,6 @@ class NASFPN(tf.keras.Model):
self._conv_op = (tf.keras.layers.SeparableConv2D
if self._config_dict['use_separable_conv']
else tf.keras.layers.Conv2D)
if self._config_dict['use_separable_conv']:
self._conv_kwargs = {
'depthwise_initializer': tf.keras.initializers.VarianceScaling(
scale=2, mode='fan_out', distribution='untruncated_normal'),
'pointwise_initializer': tf.keras.initializers.VarianceScaling(
scale=2, mode='fan_out', distribution='untruncated_normal'),
'bias_initializer': tf.zeros_initializer(),
'depthwise_regularizer': self._config_dict['kernel_regularizer'],
'pointwise_regularizer': self._config_dict['kernel_regularizer'],
'bias_regularizer': self._config_dict['bias_regularizer'],
}
else:
self._conv_kwargs = {
'kernel_initializer': tf.keras.initializers.VarianceScaling(
scale=2, mode='fan_out', distribution='untruncated_normal'),
'bias_initializer': tf.zeros_initializer(),
'kernel_regularizer': self._config_dict['kernel_regularizer'],
'bias_regularizer': self._config_dict['bias_regularizer'],
}
self._norm_op = (tf.keras.layers.experimental.SyncBatchNormalization
if self._config_dict['use_sync_bn']
else tf.keras.layers.BatchNormalization)
......@@ -240,6 +221,28 @@ class NASFPN(tf.keras.Model):
else:
return x
@property
def _conv_kwargs(self):
if self._config_dict['use_separable_conv']:
return {
'depthwise_initializer': tf.keras.initializers.VarianceScaling(
scale=2, mode='fan_out', distribution='untruncated_normal'),
'pointwise_initializer': tf.keras.initializers.VarianceScaling(
scale=2, mode='fan_out', distribution='untruncated_normal'),
'bias_initializer': tf.zeros_initializer(),
'depthwise_regularizer': self._config_dict['kernel_regularizer'],
'pointwise_regularizer': self._config_dict['kernel_regularizer'],
'bias_regularizer': self._config_dict['bias_regularizer'],
}
else:
return {
'kernel_initializer': tf.keras.initializers.VarianceScaling(
scale=2, mode='fan_out', distribution='untruncated_normal'),
'bias_initializer': tf.zeros_initializer(),
'kernel_regularizer': self._config_dict['kernel_regularizer'],
'bias_regularizer': self._config_dict['bias_regularizer'],
}
def _global_attention(self, feat0, feat1):
m = tf.math.reduce_max(feat0, axis=[1, 2], keepdims=True)
m = tf.math.sigmoid(m)
......
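Note on the NAS-FPN hunk above: `_conv_kwargs` moves from an attribute computed once in `__init__` to a `@property`, so the kwargs dict, and the initializer objects inside it, are rebuilt on every access instead of one instance being handed to every conv built from it. A minimal sketch of the difference, using hypothetical `SharedKwargs`/`FreshKwargs` classes rather than the NAS-FPN code itself:

```python
import tensorflow as tf

# Hypothetical classes for illustration only: an attribute computed once in
# __init__ hands the same initializer object to every conv built from it,
# while a property rebuilds the dict (and its initializer instances) on
# each access.
class SharedKwargs:
  def __init__(self):
    self._conv_kwargs = {
        'kernel_initializer': tf.keras.initializers.VarianceScaling(
            scale=2, mode='fan_out', distribution='untruncated_normal'),
    }

class FreshKwargs:
  @property
  def _conv_kwargs(self):
    return {
        'kernel_initializer': tf.keras.initializers.VarianceScaling(
            scale=2, mode='fan_out', distribution='untruncated_normal'),
    }

shared, fresh = SharedKwargs(), FreshKwargs()
a = tf.keras.layers.Conv2D(8, 3, **shared._conv_kwargs)
b = tf.keras.layers.Conv2D(8, 3, **shared._conv_kwargs)
c = tf.keras.layers.Conv2D(8, 3, **fresh._conv_kwargs)
d = tf.keras.layers.Conv2D(8, 3, **fresh._conv_kwargs)
print(a.kernel_initializer is b.kernel_initializer)  # True: one shared object.
print(c.kernel_initializer is d.kernel_initializer)  # False: independent copies.
```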
......@@ -140,6 +140,9 @@ class RetinaNetHead(tf.keras.layers.Layer):
for i in range(self._config_dict['num_convs']):
if level == self._config_dict['min_level']:
cls_conv_name = 'classnet-conv_{}'.format(i)
if 'kernel_initializer' in conv_kwargs:
conv_kwargs['kernel_initializer'] = tf_utils.clone_initializer(
conv_kwargs['kernel_initializer'])
self._cls_convs.append(conv_op(name=cls_conv_name, **conv_kwargs))
cls_norm_name = 'classnet-conv-norm_{}_{}'.format(level, i)
this_level_cls_norms.append(bn_op(name=cls_norm_name, **bn_kwargs))
......@@ -170,6 +173,9 @@ class RetinaNetHead(tf.keras.layers.Layer):
for i in range(self._config_dict['num_convs']):
if level == self._config_dict['min_level']:
box_conv_name = 'boxnet-conv_{}'.format(i)
if 'kernel_initializer' in conv_kwargs:
conv_kwargs['kernel_initializer'] = tf_utils.clone_initializer(
conv_kwargs['kernel_initializer'])
self._box_convs.append(conv_op(name=box_conv_name, **conv_kwargs))
box_norm_name = 'boxnet-conv-norm_{}_{}'.format(level, i)
this_level_box_norms.append(bn_op(name=box_norm_name, **bn_kwargs))
......@@ -211,6 +217,9 @@ class RetinaNetHead(tf.keras.layers.Layer):
for i in range(self._config_dict['num_convs']):
if level == self._config_dict['min_level']:
att_conv_name = '{}-conv_{}'.format(att_name, i)
if 'kernel_initializer' in conv_kwargs:
conv_kwargs['kernel_initializer'] = tf_utils.clone_initializer(
conv_kwargs['kernel_initializer'])
att_convs_i.append(conv_op(name=att_conv_name, **conv_kwargs))
att_norm_name = '{}-conv-norm_{}_{}'.format(att_name, level, i)
this_level_att_norms.append(bn_op(name=att_norm_name, **bn_kwargs))
......@@ -436,6 +445,9 @@ class RPNHead(tf.keras.layers.Layer):
for i in range(self._config_dict['num_convs']):
if level == self._config_dict['min_level']:
conv_name = 'rpn-conv_{}'.format(i)
if 'kernel_initializer' in conv_kwargs:
conv_kwargs['kernel_initializer'] = tf_utils.clone_initializer(
conv_kwargs['kernel_initializer'])
self._convs.append(conv_op(name=conv_name, **conv_kwargs))
norm_name = 'rpn-conv-norm_{}_{}'.format(level, i)
this_level_norms.append(bn_op(name=norm_name, **bn_kwargs))
......
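The RetinaNetHead and RPNHead hunks all apply the same pattern: before each conv in the stack is constructed, the kernel initializer in `conv_kwargs` is swapped for a clone via `tf_utils.clone_initializer`, so the stacked convs stop sharing one initializer object. A rough, self-contained sketch of the loop; `clone_initializer_sketch` is an assumption about what such a clone amounts to (rebuild from config), not the helper's actual implementation:

```python
import tensorflow as tf

def clone_initializer_sketch(initializer):
  """Assumed behavior: re-instantiate a Keras initializer from its config.

  Anything that is not an Initializer instance (e.g. the string
  'glorot_uniform') is passed through unchanged.
  """
  if isinstance(initializer, tf.keras.initializers.Initializer):
    return initializer.__class__.from_config(initializer.get_config())
  return initializer

conv_kwargs = {
    'filters': 256,
    'kernel_size': 3,
    'padding': 'same',
    'kernel_initializer': tf.keras.initializers.RandomNormal(stddev=0.01),
}

cls_convs = []
for i in range(4):
  # Mirror the diff: clone the initializer so each conv owns its own instance.
  if 'kernel_initializer' in conv_kwargs:
    conv_kwargs['kernel_initializer'] = clone_initializer_sketch(
        conv_kwargs['kernel_initializer'])
  cls_convs.append(
      tf.keras.layers.Conv2D(name='classnet-conv_{}'.format(i), **conv_kwargs))

print(cls_convs[0].kernel_initializer is cls_convs[1].kernel_initializer)  # False
```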
......@@ -133,6 +133,9 @@ class DetectionHead(tf.keras.layers.Layer):
self._conv_norms = []
for i in range(self._config_dict['num_convs']):
conv_name = 'detection-conv_{}'.format(i)
if 'kernel_initializer' in conv_kwargs:
conv_kwargs['kernel_initializer'] = tf_utils.clone_initializer(
conv_kwargs['kernel_initializer'])
self._convs.append(conv_op(name=conv_name, **conv_kwargs))
bn_name = 'detection-conv-bn_{}'.format(i)
self._conv_norms.append(bn_op(name=bn_name, **bn_kwargs))
......@@ -324,6 +327,11 @@ class MaskHead(tf.keras.layers.Layer):
self._conv_norms = []
for i in range(self._config_dict['num_convs']):
conv_name = 'mask-conv_{}'.format(i)
for initializer_name in ['kernel_initializer', 'depthwise_initializer',
'pointwise_initializer']:
if initializer_name in conv_kwargs:
conv_kwargs[initializer_name] = tf_utils.clone_initializer(
conv_kwargs[initializer_name])
self._convs.append(conv_op(name=conv_name, **conv_kwargs))
bn_name = 'mask-conv-bn_{}'.format(i)
self._conv_norms.append(bn_op(name=bn_name, **bn_kwargs))
......
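`MaskHead` needs a slightly wider loop because its conv op may be `SeparableConv2D`, which takes `depthwise_initializer` and `pointwise_initializer` rather than `kernel_initializer`; the diff therefore clones whichever of the three keys is present. A hedged sketch of that branch (the config flag and filter sizes are assumptions, and the clone helper is the same assumed rebuild-from-config stand-in as above):

```python
import tensorflow as tf

def clone_initializer_sketch(initializer):
  # Assumed clone behavior, as in the previous sketch: rebuild from config.
  if isinstance(initializer, tf.keras.initializers.Initializer):
    return initializer.__class__.from_config(initializer.get_config())
  return initializer

use_separable_conv = True  # Hypothetical config flag for this sketch.
conv_op = (tf.keras.layers.SeparableConv2D
           if use_separable_conv else tf.keras.layers.Conv2D)
conv_kwargs = {'filters': 256, 'kernel_size': 3, 'padding': 'same'}
if use_separable_conv:
  conv_kwargs.update(
      depthwise_initializer=tf.keras.initializers.VarianceScaling(scale=2),
      pointwise_initializer=tf.keras.initializers.VarianceScaling(scale=2))
else:
  conv_kwargs.update(
      kernel_initializer=tf.keras.initializers.VarianceScaling(scale=2))

convs = []
for i in range(2):
  # Clone every initializer key that is present for the chosen conv op.
  for initializer_name in ('kernel_initializer', 'depthwise_initializer',
                           'pointwise_initializer'):
    if initializer_name in conv_kwargs:
      conv_kwargs[initializer_name] = clone_initializer_sketch(
          conv_kwargs[initializer_name])
  convs.append(conv_op(name='mask-conv_{}'.format(i), **conv_kwargs))

print(convs[0].depthwise_initializer is convs[1].depthwise_initializer)  # False
```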
......@@ -118,6 +118,9 @@ class MaskScoring(tf.keras.Model):
self._conv_norms = []
for i in range(self._config_dict['num_convs']):
conv_name = 'mask-scoring_{}'.format(i)
if 'kernel_initializer' in conv_kwargs:
conv_kwargs['kernel_initializer'] = tf_utils.clone_initializer(
conv_kwargs['kernel_initializer'])
self._convs.append(conv_op(name=conv_name, **conv_kwargs))
bn_name = 'mask-scoring-bn_{}'.format(i)
self._conv_norms.append(bn_op(name=bn_name, **bn_kwargs))
......@@ -297,15 +300,7 @@ class SegmentationHead(tf.keras.layers.Layer):
def build(self, input_shape: Union[tf.TensorShape, List[tf.TensorShape]]):
"""Creates the variables of the segmentation head."""
use_depthwise_convolution = self._config_dict['use_depthwise_convolution']
random_initializer = tf.keras.initializers.RandomNormal(stddev=0.01)
conv_op = tf.keras.layers.Conv2D
conv_kwargs = {
'kernel_size': 3 if not use_depthwise_convolution else 1,
'padding': 'same',
'use_bias': False,
'kernel_initializer': random_initializer,
'kernel_regularizer': self._config_dict['kernel_regularizer'],
}
bn_op = (tf.keras.layers.experimental.SyncBatchNormalization
if self._config_dict['use_sync_bn']
else tf.keras.layers.BatchNormalization)
......@@ -352,7 +347,8 @@ class SegmentationHead(tf.keras.layers.Layer):
kernel_size=3,
padding='same',
use_bias=False,
depthwise_initializer=random_initializer,
depthwise_initializer=tf.keras.initializers.RandomNormal(
stddev=0.01),
depthwise_regularizer=self._config_dict['kernel_regularizer'],
depth_multiplier=1))
norm_name = 'segmentation_head_depthwise_norm_{}'.format(i)
......@@ -362,7 +358,12 @@ class SegmentationHead(tf.keras.layers.Layer):
conv_op(
name=conv_name,
filters=self._config_dict['num_filters'],
**conv_kwargs))
kernel_size=3 if not use_depthwise_convolution else 1,
padding='same',
use_bias=False,
kernel_initializer=tf.keras.initializers.RandomNormal(
stddev=0.01),
kernel_regularizer=self._config_dict['kernel_regularizer']))
norm_name = 'segmentation_head_norm_{}'.format(i)
self._norms.append(bn_op(name=norm_name, **bn_kwargs))
......
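In the segmentation head hunks, the shared `conv_kwargs` dict and the single `random_initializer` local disappear; every conv now constructs its own `RandomNormal(stddev=0.01)`, and when depthwise convolution is enabled the 3x3 spatial filtering lives in the `DepthwiseConv2D`, so the `Conv2D` that follows drops to a 1x1 kernel. A small sketch of that pairing under assumed filter counts (the helper name is hypothetical):

```python
import tensorflow as tf

def segmentation_conv_pair(use_depthwise_convolution, num_filters=256,
                           kernel_regularizer=None):
  """Sketch of one head conv step; names and defaults are assumptions."""
  layers = []
  if use_depthwise_convolution:
    # The 3x3 depthwise conv does the spatial mixing...
    layers.append(tf.keras.layers.DepthwiseConv2D(
        kernel_size=3,
        padding='same',
        use_bias=False,
        depthwise_initializer=tf.keras.initializers.RandomNormal(stddev=0.01),
        depthwise_regularizer=kernel_regularizer,
        depth_multiplier=1))
  # ...so the regular conv is 1x1 when depthwise is used, 3x3 otherwise.
  layers.append(tf.keras.layers.Conv2D(
      filters=num_filters,
      kernel_size=3 if not use_depthwise_convolution else 1,
      padding='same',
      use_bias=False,
      # A fresh initializer per conv rather than one shared instance.
      kernel_initializer=tf.keras.initializers.RandomNormal(stddev=0.01),
      kernel_regularizer=kernel_regularizer))
  return layers

x = tf.zeros([1, 64, 64, 32])
for layer in segmentation_conv_pair(use_depthwise_convolution=True):
  x = layer(x)
print(x.shape)  # (1, 64, 64, 256)
```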
......@@ -16,6 +16,8 @@
import tensorflow as tf
from official.modeling import tf_utils
class SpatialPyramidPooling(tf.keras.layers.Layer):
"""Implements the Atrous Spatial Pyramid Pooling.
......@@ -103,8 +105,10 @@ class SpatialPyramidPooling(tf.keras.layers.Layer):
conv_sequential = tf.keras.Sequential([
tf.keras.layers.Conv2D(
filters=self.output_channels, kernel_size=(1, 1),
kernel_initializer=self.kernel_initializer,
filters=self.output_channels,
kernel_size=(1, 1),
kernel_initializer=tf_utils.clone_initializer(
self.kernel_initializer),
kernel_regularizer=self.kernel_regularizer,
use_bias=False),
bn_op(
......@@ -121,21 +125,32 @@ class SpatialPyramidPooling(tf.keras.layers.Layer):
if self.use_depthwise_convolution:
leading_layers += [
tf.keras.layers.DepthwiseConv2D(
depth_multiplier=1, kernel_size=kernel_size,
padding='same', depthwise_regularizer=self.kernel_regularizer,
depthwise_initializer=self.kernel_initializer,
dilation_rate=dilation_rate, use_bias=False)
depth_multiplier=1,
kernel_size=kernel_size,
padding='same',
depthwise_regularizer=self.kernel_regularizer,
depthwise_initializer=tf_utils.clone_initializer(
self.kernel_initializer),
dilation_rate=dilation_rate,
use_bias=False)
]
kernel_size = (1, 1)
conv_sequential = tf.keras.Sequential(leading_layers + [
tf.keras.layers.Conv2D(
filters=self.output_channels, kernel_size=kernel_size,
padding='same', kernel_regularizer=self.kernel_regularizer,
kernel_initializer=self.kernel_initializer,
dilation_rate=dilation_rate, use_bias=False),
bn_op(axis=bn_axis, momentum=self.batchnorm_momentum,
epsilon=self.batchnorm_epsilon),
tf.keras.layers.Activation(self.activation)])
filters=self.output_channels,
kernel_size=kernel_size,
padding='same',
kernel_regularizer=self.kernel_regularizer,
kernel_initializer=tf_utils.clone_initializer(
self.kernel_initializer),
dilation_rate=dilation_rate,
use_bias=False),
bn_op(
axis=bn_axis,
momentum=self.batchnorm_momentum,
epsilon=self.batchnorm_epsilon),
tf.keras.layers.Activation(self.activation)
])
self.aspp_layers.append(conv_sequential)
if self.pool_kernel_size is None:
......@@ -151,7 +166,8 @@ class SpatialPyramidPooling(tf.keras.layers.Layer):
tf.keras.layers.Conv2D(
filters=self.output_channels,
kernel_size=(1, 1),
kernel_initializer=self.kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(
self.kernel_initializer),
kernel_regularizer=self.kernel_regularizer,
use_bias=False),
bn_op(
......@@ -170,8 +186,10 @@ class SpatialPyramidPooling(tf.keras.layers.Layer):
self.projection = tf.keras.Sequential([
tf.keras.layers.Conv2D(
filters=self.output_channels, kernel_size=(1, 1),
kernel_initializer=self.kernel_initializer,
filters=self.output_channels,
kernel_size=(1, 1),
kernel_initializer=tf_utils.clone_initializer(
self.kernel_initializer),
kernel_regularizer=self.kernel_regularizer,
use_bias=False),
bn_op(
......@@ -179,7 +197,8 @@ class SpatialPyramidPooling(tf.keras.layers.Layer):
momentum=self.batchnorm_momentum,
epsilon=self.batchnorm_epsilon),
tf.keras.layers.Activation(self.activation),
tf.keras.layers.Dropout(rate=self.dropout)])
tf.keras.layers.Dropout(rate=self.dropout)
])
def call(self, inputs, training=None):
if training is None:
......
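The ASPP hunks pair the new `from official.modeling import tf_utils` import with cloning `self.kernel_initializer` inside every branch, and reflow the branch construction. Each atrous branch keeps the structure shown above: an optional dilated `DepthwiseConv2D`, a `Conv2D` that shrinks to 1x1 once the depthwise layer has done the spatial filtering, batch norm, then the activation. A hedged sketch of one such branch; the function name and defaults are assumptions, and the inline `clone` reduces the helper to the rebuild-from-config behavior assumed earlier:

```python
import tensorflow as tf

def aspp_branch_sketch(output_channels, dilation_rate, kernel_initializer,
                       use_depthwise_convolution=False,
                       kernel_regularizer=None, activation='relu'):
  """Sketch of one atrous branch; names and defaults are assumptions."""
  def clone(init):  # Assumed clone behavior: rebuild from config.
    return init.__class__.from_config(init.get_config())

  kernel_size = (3, 3)
  leading_layers = []
  if use_depthwise_convolution:
    leading_layers.append(tf.keras.layers.DepthwiseConv2D(
        depth_multiplier=1,
        kernel_size=kernel_size,
        padding='same',
        depthwise_regularizer=kernel_regularizer,
        depthwise_initializer=clone(kernel_initializer),
        dilation_rate=dilation_rate,
        use_bias=False))
    kernel_size = (1, 1)  # Spatial mixing already handled above.
  return tf.keras.Sequential(leading_layers + [
      tf.keras.layers.Conv2D(
          filters=output_channels,
          kernel_size=kernel_size,
          padding='same',
          kernel_regularizer=kernel_regularizer,
          kernel_initializer=clone(kernel_initializer),
          dilation_rate=dilation_rate,
          use_bias=False),
      tf.keras.layers.BatchNormalization(),
      tf.keras.layers.Activation(activation),
  ])

branch = aspp_branch_sketch(
    output_channels=256,
    dilation_rate=6,
    kernel_initializer=tf.keras.initializers.GlorotUniform(),
    use_depthwise_convolution=True)
print(branch(tf.zeros([1, 32, 32, 64])).shape)  # (1, 32, 32, 256)
```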
......@@ -141,7 +141,8 @@ class ResidualBlock(tf.keras.layers.Layer):
kernel_size=1,
strides=self._strides,
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(
self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm0 = self._norm(
......@@ -162,7 +163,7 @@ class ResidualBlock(tf.keras.layers.Layer):
strides=self._strides,
padding=conv1_padding,
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm1 = self._norm(
......@@ -177,7 +178,7 @@ class ResidualBlock(tf.keras.layers.Layer):
strides=1,
padding='same',
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm2 = self._norm(
......@@ -191,7 +192,8 @@ class ResidualBlock(tf.keras.layers.Layer):
in_filters=self._filters,
out_filters=self._filters,
se_ratio=self._se_ratio,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(
self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
else:
......@@ -338,7 +340,8 @@ class BottleneckBlock(tf.keras.layers.Layer):
kernel_size=1,
strides=1,
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(
self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
else:
......@@ -347,7 +350,8 @@ class BottleneckBlock(tf.keras.layers.Layer):
kernel_size=1,
strides=self._strides,
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(
self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
......@@ -362,7 +366,7 @@ class BottleneckBlock(tf.keras.layers.Layer):
kernel_size=1,
strides=1,
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm1 = self._norm(
......@@ -380,7 +384,7 @@ class BottleneckBlock(tf.keras.layers.Layer):
dilation_rate=self._dilation_rate,
padding='same',
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm2 = self._norm(
......@@ -396,7 +400,7 @@ class BottleneckBlock(tf.keras.layers.Layer):
kernel_size=1,
strides=1,
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm3 = self._norm(
......@@ -412,7 +416,8 @@ class BottleneckBlock(tf.keras.layers.Layer):
in_filters=self._filters * 4,
out_filters=self._filters * 4,
se_ratio=self._se_ratio,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(
self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
else:
......@@ -616,7 +621,8 @@ class InvertedBottleneckBlock(tf.keras.layers.Layer):
strides=expand_stride,
padding='same',
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(
self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm0 = self._norm(
......@@ -635,7 +641,8 @@ class InvertedBottleneckBlock(tf.keras.layers.Layer):
depth_multiplier=1,
dilation_rate=self._dilation_rate,
use_bias=False,
depthwise_initializer=self._kernel_initializer,
depthwise_initializer=tf_utils.clone_initializer(
self._kernel_initializer),
depthwise_regularizer=self._depthsize_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm1 = self._norm(
......@@ -657,7 +664,8 @@ class InvertedBottleneckBlock(tf.keras.layers.Layer):
se_ratio=self._se_ratio,
divisible_by=self._divisible_by,
round_down_protect=self._se_round_down_protect,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(
self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer,
activation=self._se_inner_activation,
......@@ -672,7 +680,7 @@ class InvertedBottleneckBlock(tf.keras.layers.Layer):
strides=1,
padding='same',
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm2 = self._norm(
......@@ -829,7 +837,7 @@ class ResidualInner(tf.keras.layers.Layer):
strides=self.strides,
use_bias=False,
padding='same',
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer)
self._batch_norm_1 = self._norm(
......@@ -843,7 +851,7 @@ class ResidualInner(tf.keras.layers.Layer):
strides=1,
use_bias=False,
padding='same',
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer)
super(ResidualInner, self).build(input_shape)
......@@ -954,7 +962,7 @@ class BottleneckResidualInner(tf.keras.layers.Layer):
strides=self.strides,
use_bias=False,
padding='same',
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer)
self._batch_norm_1 = self._norm(
axis=self._bn_axis,
......@@ -966,7 +974,7 @@ class BottleneckResidualInner(tf.keras.layers.Layer):
strides=1,
use_bias=False,
padding='same',
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer)
self._batch_norm_2 = self._norm(
axis=self._bn_axis,
......@@ -978,7 +986,7 @@ class BottleneckResidualInner(tf.keras.layers.Layer):
strides=1,
use_bias=False,
padding='same',
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer)
super(BottleneckResidualInner, self).build(input_shape)
......@@ -1286,7 +1294,7 @@ class DepthwiseSeparableConvBlock(tf.keras.layers.Layer):
padding='same',
depth_multiplier=1,
dilation_rate=self._dilation_rate,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._depthsize_regularizer,
use_bias=False)
self._norm0 = self._norm(
......@@ -1300,7 +1308,7 @@ class DepthwiseSeparableConvBlock(tf.keras.layers.Layer):
strides=1,
padding='same',
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer)
self._norm1 = self._norm(
axis=self._bn_axis,
......@@ -1411,7 +1419,7 @@ class TuckerConvBlock(tf.keras.layers.Layer):
strides=1,
padding='same',
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm0 = self._norm(
......@@ -1432,7 +1440,7 @@ class TuckerConvBlock(tf.keras.layers.Layer):
strides=self._strides,
padding='same',
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm1 = self._norm(
......@@ -1449,7 +1457,7 @@ class TuckerConvBlock(tf.keras.layers.Layer):
strides=1,
padding='same',
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm2 = self._norm(
......
......@@ -155,7 +155,7 @@ class BottleneckBlock3D(tf.keras.layers.Layer):
self._temporal_strides, self._spatial_strides, self._spatial_strides
],
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm0 = self._norm(
......@@ -169,7 +169,7 @@ class BottleneckBlock3D(tf.keras.layers.Layer):
strides=[self._temporal_strides, 1, 1],
padding='same',
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm1 = self._norm(
......@@ -183,7 +183,7 @@ class BottleneckBlock3D(tf.keras.layers.Layer):
strides=[1, self._spatial_strides, self._spatial_strides],
padding='same',
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm2 = self._norm(
......@@ -197,7 +197,7 @@ class BottleneckBlock3D(tf.keras.layers.Layer):
strides=[1, 1, 1],
padding='same',
use_bias=False,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._norm3 = self._norm(
......@@ -211,7 +211,8 @@ class BottleneckBlock3D(tf.keras.layers.Layer):
out_filters=self._filters * 4,
se_ratio=self._se_ratio,
use_3d_input=True,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(
self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
else:
......
......@@ -163,7 +163,7 @@ class SqueezeExcitation(tf.keras.layers.Layer):
strides=1,
padding='same',
use_bias=True,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
......@@ -173,7 +173,7 @@ class SqueezeExcitation(tf.keras.layers.Layer):
strides=1,
padding='same',
use_bias=True,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
......@@ -1155,7 +1155,7 @@ class SpatialPyramidPooling(tf.keras.layers.Layer):
conv1 = tf.keras.layers.Conv2D(
filters=self._output_channels,
kernel_size=(1, 1),
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
use_bias=False)
norm1 = self._bn_op(
......@@ -1175,7 +1175,8 @@ class SpatialPyramidPooling(tf.keras.layers.Layer):
kernel_size=kernel_size,
padding='same',
depthwise_regularizer=self._kernel_regularizer,
depthwise_initializer=self._kernel_initializer,
depthwise_initializer=tf_utils.clone_initializer(
self._kernel_initializer),
dilation_rate=dilation_rate,
use_bias=False)
]
......@@ -1186,7 +1187,8 @@ class SpatialPyramidPooling(tf.keras.layers.Layer):
kernel_size=kernel_size,
padding='same',
kernel_regularizer=self._kernel_regularizer,
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(
self._kernel_initializer),
dilation_rate=dilation_rate,
use_bias=False)
]
......@@ -1208,7 +1210,7 @@ class SpatialPyramidPooling(tf.keras.layers.Layer):
conv2 = tf.keras.layers.Conv2D(
filters=self._output_channels,
kernel_size=(1, 1),
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
use_bias=False)
norm2 = self._bn_op(
......@@ -1225,7 +1227,8 @@ class SpatialPyramidPooling(tf.keras.layers.Layer):
tf.keras.layers.Conv2D(
filters=self._output_channels,
kernel_size=(1, 1),
kernel_initializer=self._kernel_initializer,
kernel_initializer=tf_utils.clone_initializer(
self._kernel_initializer),
kernel_regularizer=self._kernel_regularizer,
use_bias=False),
self._bn_op(
......
......@@ -616,7 +616,7 @@ def bbox_overlap(boxes, gt_boxes):
tf.transpose(gt_invalid_mask, [0, 2, 1]))
iou = tf.where(padding_mask, -tf.ones_like(iou), iou)
# Fills -1 for for invalid (-1) boxes.
# Fills -1 for invalid (-1) boxes.
boxes_invalid_mask = tf.less(
tf.reduce_max(boxes, axis=-1, keepdims=True), 0.0)
iou = tf.where(boxes_invalid_mask, -tf.ones_like(iou), iou)
......
......@@ -218,7 +218,7 @@ class ImageClassificationTask(base_task.Task):
"""Does forward and backward.
Args:
inputs: A tuple of of input tensors of (features, labels).
inputs: A tuple of input tensors of (features, labels).
model: A tf.keras.Model instance.
optimizer: The optimizer for this training step.
metrics: A nested structure of metrics objects.
......@@ -278,7 +278,7 @@ class ImageClassificationTask(base_task.Task):
"""Runs validatation step.
Args:
inputs: A tuple of of input tensors of (features, labels).
inputs: A tuple of input tensors of (features, labels).
model: A tf.keras.Model instance.
metrics: A nested structure of metrics objects.
......
......@@ -64,7 +64,7 @@ class BalancedPositiveNegativeSampler(minibatch_sampler.MinibatchSampler):
sorted_indices_tensor: A sorted int32 tensor of shape [N] which contains
the signed indices of the examples where the sign is based on the label
value. The examples that cannot be sampled are set to 0. It samples
atmost sample_size*positive_fraction positive examples and remaining
at most sample_size*positive_fraction positive examples and remaining
from negative examples.
sample_size: Size of subsamples.
......