Unverified Commit fe748d4a authored by pkulzc's avatar pkulzc Committed by GitHub
Browse files

Object detection changes: (#7208)

257914648  by lzc:

    Internal changes

--
257525973  by Zhichao Lu:

    Fixes bug that silently prevents checkpoints from loading when training w/ eager + functions. Also sets up scripts to run training.

--
257296614  by Zhichao Lu:

    Adding detection_features to model outputs

--
257234565  by Zhichao Lu:

    Fix wrong order of `classes_with_max_scores` in class-agnostic NMS caused by
    sorting in partitioned-NMS.

--
257232002  by ronnyvotel:

    Supporting `filter_nonoverlapping` option in np_box_list_ops.clip_to_window().

--
257198282  by Zhichao Lu:

    Adding the focal loss and l1 loss from the Objects as Points paper.

--
257089535  by Zhichao Lu:

    Create Keras based ssd + resnetv1 + fpn.

--
257087407  by Zhichao Lu:

    Make object_detection/data_decoders Python3-compatible.

--
257004582  by Zhichao Lu:

    Updates _decode_raw_data_into_masks_and_boxes to the latest binary masks-to-string encoding fo...
parent 81123ebf
......@@ -21,6 +21,7 @@ Based on PNASNet model: https://arxiv.org/abs/1712.00559
import tensorflow as tf
from object_detection.meta_architectures import faster_rcnn_meta_arch
from object_detection.utils import variables_helper
from nets.nasnet import nasnet_utils
from nets.nasnet import pnasnet
......@@ -302,7 +303,7 @@ class FasterRCNNPNASFeatureExtractor(
the model graph.
"""
variables_to_restore = {}
for variable in tf.global_variables():
for variable in variables_helper.get_global_variables_safely():
if variable.op.name.startswith(
first_stage_feature_extractor_scope):
var_name = variable.op.name.replace(
......
......@@ -25,6 +25,7 @@ class FasterRcnnResnetV1FeatureExtractorTest(tf.test.TestCase):
def _build_feature_extractor(self,
first_stage_features_stride,
activation_fn=tf.nn.relu,
architecture='resnet_v1_101'):
feature_extractor_map = {
'resnet_v1_50':
......@@ -37,6 +38,7 @@ class FasterRcnnResnetV1FeatureExtractorTest(tf.test.TestCase):
return feature_extractor_map[architecture](
is_training=False,
first_stage_features_stride=first_stage_features_stride,
activation_fn=activation_fn,
batch_norm_trainable=False,
reuse_weights=None,
weight_decay=0.0)
......@@ -132,6 +134,32 @@ class FasterRcnnResnetV1FeatureExtractorTest(tf.test.TestCase):
features_shape_out = sess.run(features_shape)
self.assertAllEqual(features_shape_out, [3, 7, 7, 2048])
def test_overwriting_activation_fn(self):
  """Verifies a custom activation_fn reaches the conv ops of each backbone.

  Builds each ResNet-v1 feature extractor with activation_fn=tf.nn.relu6 and
  checks that Relu6 ops appear at the expected scoped names in both the
  proposal and box-classifier feature graphs.
  """
  for architecture in ['resnet_v1_50', 'resnet_v1_101', 'resnet_v1_152']:
    # Use a fresh graph per architecture. With a single shared default
    # graph, the second and third iterations get uniquified scopes
    # ('TestStage1Scope_1', ...) and the assertions below would be
    # satisfied by ops left over from the first iteration, so those
    # architectures were never actually tested.
    with tf.Graph().as_default():
      feature_extractor = self._build_feature_extractor(
          first_stage_features_stride=16,
          architecture=architecture,
          activation_fn=tf.nn.relu6)
      preprocessed_inputs = tf.random_uniform([4, 224, 224, 3],
                                              maxval=255,
                                              dtype=tf.float32)
      rpn_feature_map, _ = feature_extractor.extract_proposal_features(
          preprocessed_inputs, scope='TestStage1Scope')
      _ = feature_extractor.extract_box_classifier_features(
          rpn_feature_map, scope='TestStaget2Scope')
      relu6_ops = [
          op for op in tf.get_default_graph().get_operations()
          if op.type == 'Relu6'
      ]
      op_names = [op.name for op in relu6_ops]
      # A list is never None, so assertIsNotNone was vacuous; assert the
      # list is non-empty instead.
      self.assertTrue(relu6_ops)
      # NOTE(review): op-name pattern '<scope>/<arch>/<arch>/conv1' is
      # taken from the original resnet_v1_50 expectation; confirm slim's
      # resnet_v1_101/152 scopes follow the same naming.
      self.assertIn(
          'TestStage1Scope/%s/%s/conv1/Relu6' % (architecture, architecture),
          op_names)
      self.assertIn(
          'TestStaget2Scope/%s/block4/unit_1/bottleneck_v1/conv1/Relu6'
          % architecture,
          op_names)
# Run the test cases in this module when executed as a script.
if __name__ == '__main__':
  tf.test.main()
......@@ -79,14 +79,19 @@ def create_conv_block(
"""
layers = []
if use_depthwise:
layers.append(tf.keras.layers.SeparableConv2D(
depth,
[kernel_size, kernel_size],
depth_multiplier=1,
padding=padding,
strides=stride,
name=layer_name + '_depthwise_conv',
**conv_hyperparams.params()))
kwargs = conv_hyperparams.params()
# Both the regularizer and initializer apply to the depthwise layer,
# so we remap the kernel_* to depthwise_* here.
kwargs['depthwise_regularizer'] = kwargs['kernel_regularizer']
kwargs['depthwise_initializer'] = kwargs['kernel_initializer']
layers.append(
tf.keras.layers.SeparableConv2D(
depth, [kernel_size, kernel_size],
depth_multiplier=1,
padding=padding,
strides=stride,
name=layer_name + '_depthwise_conv',
**kwargs))
else:
layers.append(tf.keras.layers.Conv2D(
depth,
......
......@@ -160,7 +160,12 @@ class _LayersOverride(object):
"""
if self._conv_hyperparams:
kwargs = self._conv_hyperparams.params(**kwargs)
# Both the regularizer and initializer apply to the depthwise layer in
# MobilenetV1, so we remap the kernel_* to depthwise_* here.
kwargs['depthwise_regularizer'] = kwargs['kernel_regularizer']
kwargs['depthwise_initializer'] = kwargs['kernel_initializer']
else:
kwargs['depthwise_regularizer'] = self.regularizer
kwargs['depthwise_initializer'] = self.initializer
kwargs['padding'] = 'same'
......
This diff is collapsed.
......@@ -220,7 +220,7 @@ class SsdMobilenetV1FpnFeatureExtractorTest(
_ = feature_extractor.extract_features(preprocessed_image)
self.assertTrue(
any(op.type == 'FusedBatchNorm'
any('FusedBatchNorm' in op.type
for op in tf.get_default_graph().get_operations()))
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment