Commit a7b7c0b5 authored by Abdullah Rashwan, committed by A. Unique TensorFlower

Internal change

PiperOrigin-RevId: 454254787
parent ec31b3b9


# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for factory.py."""
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
from tensorflow.python.distribute import combinations
from official.vision.beta.projects.panoptic_maskrcnn.configs import panoptic_deeplab as panoptic_deeplab_cfg
from official.vision.beta.projects.panoptic_maskrcnn.configs import panoptic_maskrcnn as panoptic_maskrcnn_cfg
from official.vision.beta.projects.panoptic_maskrcnn.modeling import factory
from official.vision.configs import backbones
from official.vision.configs import decoders
from official.vision.configs import semantic_segmentation


class PanopticMaskRCNNBuilderTest(parameterized.TestCase, tf.test.TestCase):
@parameterized.parameters(
('resnet', (640, 640), 'dilated_resnet', 'fpn'),
('resnet', (640, 640), 'dilated_resnet', 'aspp'),
('resnet', (640, 640), None, 'fpn'),
('resnet', (640, 640), None, 'aspp'),
('resnet', (640, 640), None, None),
('resnet', (None, None), 'dilated_resnet', 'fpn'),
('resnet', (None, None), 'dilated_resnet', 'aspp'),
('resnet', (None, None), None, 'fpn'),
('resnet', (None, None), None, 'aspp'),
('resnet', (None, None), None, None))
def test_builder(self, backbone_type, input_size, segmentation_backbone_type,
segmentation_decoder_type):
num_classes = 2
input_specs = tf.keras.layers.InputSpec(
shape=[None, input_size[0], input_size[1], 3])
segmentation_output_stride = 16
    level = int(np.log2(segmentation_output_stride))
segmentation_model = semantic_segmentation.SemanticSegmentationModel(
num_classes=2,
backbone=backbones.Backbone(type=segmentation_backbone_type),
decoder=decoders.Decoder(type=segmentation_decoder_type),
head=semantic_segmentation.SegmentationHead(level=level))
model_config = panoptic_maskrcnn_cfg.PanopticMaskRCNN(
num_classes=num_classes,
segmentation_model=segmentation_model,
backbone=backbones.Backbone(type=backbone_type),
shared_backbone=segmentation_backbone_type is None,
shared_decoder=segmentation_decoder_type is None)
l2_regularizer = tf.keras.regularizers.l2(5e-5)
_ = factory.build_panoptic_maskrcnn(
input_specs=input_specs,
model_config=model_config,
l2_regularizer=l2_regularizer)
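
  # A hedged sketch of an additional check: it mirrors the
  # (backbone='resnet', shared backbone and decoder) case above, but captures
  # the factory's return value and verifies its type. This assumes
  # `factory.build_panoptic_maskrcnn` returns a `tf.keras.Model` instance,
  # as other builders in official.vision do.
  def test_builder_returns_keras_model(self):
    segmentation_model = semantic_segmentation.SemanticSegmentationModel(
        num_classes=2,
        backbone=backbones.Backbone(type=None),
        decoder=decoders.Decoder(type=None),
        head=semantic_segmentation.SegmentationHead(level=4))
    model_config = panoptic_maskrcnn_cfg.PanopticMaskRCNN(
        num_classes=2,
        segmentation_model=segmentation_model,
        backbone=backbones.Backbone(type='resnet'),
        shared_backbone=True,
        shared_decoder=True)
    model = factory.build_panoptic_maskrcnn(
        input_specs=tf.keras.layers.InputSpec(shape=[None, 640, 640, 3]),
        model_config=model_config,
        l2_regularizer=tf.keras.regularizers.l2(5e-5))
    self.assertIsInstance(model, tf.keras.Model)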


class PanopticDeeplabBuilderTest(parameterized.TestCase, tf.test.TestCase):
@combinations.generate(
combinations.combine(
input_size=[(640, 640), (512, 512)],
backbone_type=['resnet', 'dilated_resnet'],
decoder_type=['aspp', 'fpn'],
level=[2, 3, 4],
low_level=[(4, 3), (3, 2)],
shared_decoder=[True, False],
generate_panoptic_masks=[True, False]))
def test_builder(self, input_size, backbone_type,
level, low_level, decoder_type,
shared_decoder, generate_panoptic_masks):
num_classes = 10
input_specs = tf.keras.layers.InputSpec(
shape=[None, input_size[0], input_size[1], 3])
model_config = panoptic_deeplab_cfg.PanopticDeeplab(
num_classes=num_classes,
input_size=input_size,
backbone=backbones.Backbone(type=backbone_type),
decoder=decoders.Decoder(type=decoder_type),
semantic_head=panoptic_deeplab_cfg.SemanticHead(
level=level,
num_convs=1,
kernel_size=5,
prediction_kernel_size=1,
low_level=low_level),
instance_head=panoptic_deeplab_cfg.InstanceHead(
level=level,
num_convs=1,
kernel_size=5,
prediction_kernel_size=1,
low_level=low_level),
shared_decoder=shared_decoder,
generate_panoptic_masks=generate_panoptic_masks)
l2_regularizer = tf.keras.regularizers.l2(5e-5)
_ = factory.build_panoptic_deeplab(
input_specs=input_specs,
model_config=model_config,
l2_regularizer=l2_regularizer)


if __name__ == '__main__':
  tf.test.main()


# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for panoptic_deeplab.py."""
import os
from absl.testing import parameterized
import tensorflow as tf
from official.vision.beta.projects.panoptic_maskrcnn.configs import panoptic_deeplab as cfg
from official.vision.beta.projects.panoptic_maskrcnn.tasks import panoptic_deeplab


# TODO(b/234636381): add unit test for train and validation step.
class PanopticDeeplabTaskTest(tf.test.TestCase, parameterized.TestCase):
@parameterized.parameters(
(['all'], False),
(['backbone'], False),
(['decoder'], False),
(['decoder'], True))
def test_model_initializing(self, init_checkpoint_modules, shared_decoder):
task_config = cfg.PanopticDeeplabTask(
model=cfg.PanopticDeeplab(
num_classes=10,
input_size=[640, 640, 3],
shared_decoder=shared_decoder))
task = panoptic_deeplab.PanopticDeeplabTask(task_config)
model = task.build_model()
ckpt = tf.train.Checkpoint(**model.checkpoint_items)
ckpt_save_dir = self.create_tempdir().full_path
ckpt.save(os.path.join(ckpt_save_dir, 'ckpt'))
task._task_config.init_checkpoint = ckpt_save_dir
task._task_config.init_checkpoint_modules = init_checkpoint_modules
task.initialize(model)
@parameterized.parameters(
(True,),
(False,))
def test_build_metrics(self, training):
task_config = cfg.PanopticDeeplabTask(
model=cfg.PanopticDeeplab(
num_classes=10,
input_size=[640, 640, 3],
shared_decoder=False))
task = panoptic_deeplab.PanopticDeeplabTask(task_config)
metrics = task.build_metrics(training=training)
if training:
expected_metric_names = {
'total_loss',
'segmentation_loss',
'instance_center_heatmap_loss',
'instance_center_offset_loss',
'model_loss'}
self.assertEqual(
expected_metric_names,
set([metric.name for metric in metrics]))
else:
      self.assertTrue(hasattr(task, 'perclass_iou_metric'))
      self.assertTrue(hasattr(task, 'panoptic_quality_metric'))
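
  # A hedged sketch toward the TODO above (b/234636381): a minimal smoke test
  # of the train step. It assumes the task follows the standard base-task
  # interface (`build_inputs` and `train_step(inputs, model, optimizer,
  # metrics=...)`) and that `task_config.train_data` exposes `input_path` and
  # `global_batch_size`. The tfrecord shard below is a placeholder path, not
  # data this test provides; a validation-step check would follow the same
  # pattern with `task.validation_step`.
  def test_train_step_smoke(self):
    task_config = cfg.PanopticDeeplabTask(
        model=cfg.PanopticDeeplab(
            num_classes=10,
            input_size=[640, 640, 3],
            shared_decoder=False))
    task_config.train_data.input_path = 'coco/train-00000-of-00256.tfrecord'
    task_config.train_data.global_batch_size = 2
    task = panoptic_deeplab.PanopticDeeplabTask(task_config)
    model = task.build_model()
    metrics = task.build_metrics(training=True)
    optimizer = tf.keras.optimizers.SGD(learning_rate=0.01)
    dataset = task.build_inputs(task_config.train_data)
    logs = task.train_step(next(iter(dataset)), model, optimizer,
                           metrics=metrics)
    # At minimum the step is expected to report a loss entry.
    self.assertNotEmpty(logs)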


if __name__ == '__main__':
  tf.test.main()


# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for panoptic_maskrcnn.py."""
import os
from absl.testing import parameterized
import tensorflow as tf
from official.vision.beta.projects.panoptic_maskrcnn.configs import panoptic_maskrcnn as cfg
from official.vision.beta.projects.panoptic_maskrcnn.tasks import panoptic_maskrcnn
from official.vision.configs import decoders as decoder_cfg
from official.vision.configs import semantic_segmentation as segmentation_cfg


class PanopticMaskRCNNTaskTest(tf.test.TestCase, parameterized.TestCase):
@parameterized.parameters(
(['all'],),
(['backbone'],),
(['segmentation_backbone'],),
(['segmentation_decoder'],),
(['backbone', 'segmentation_backbone'],),
(['segmentation_backbone', 'segmentation_decoder'],))
def test_model_initializing(self, init_checkpoint_modules):
shared_backbone = ('segmentation_backbone' not in init_checkpoint_modules)
shared_decoder = ('segmentation_decoder' not in init_checkpoint_modules and
shared_backbone)
task_config = cfg.PanopticMaskRCNNTask(
model=cfg.PanopticMaskRCNN(
num_classes=2,
input_size=[640, 640, 3],
segmentation_model=segmentation_cfg.SemanticSegmentationModel(
decoder=decoder_cfg.Decoder(type='fpn')),
shared_backbone=shared_backbone,
shared_decoder=shared_decoder))
task = panoptic_maskrcnn.PanopticMaskRCNNTask(task_config)
model = task.build_model()
ckpt = tf.train.Checkpoint(**model.checkpoint_items)
ckpt_save_dir = self.create_tempdir().full_path
ckpt.save(os.path.join(ckpt_save_dir, 'ckpt'))
if (init_checkpoint_modules == ['all'] or
'backbone' in init_checkpoint_modules):
task._task_config.init_checkpoint = ckpt_save_dir
if ('segmentation_backbone' in init_checkpoint_modules or
'segmentation_decoder' in init_checkpoint_modules):
task._task_config.segmentation_init_checkpoint = ckpt_save_dir
task._task_config.init_checkpoint_modules = init_checkpoint_modules
task.initialize(model)
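
  # A hedged smoke-test sketch: it assumes `PanopticMaskRCNNTask.build_metrics`
  # follows the standard task interface and returns a non-empty list of Keras
  # metrics when `training=True`; the config mirrors the shared backbone and
  # decoder case exercised above.
  def test_build_metrics_smoke(self):
    task_config = cfg.PanopticMaskRCNNTask(
        model=cfg.PanopticMaskRCNN(
            num_classes=2,
            input_size=[640, 640, 3],
            segmentation_model=segmentation_cfg.SemanticSegmentationModel(
                decoder=decoder_cfg.Decoder(type='fpn')),
            shared_backbone=True,
            shared_decoder=True))
    task = panoptic_maskrcnn.PanopticMaskRCNNTask(task_config)
    metrics = task.build_metrics(training=True)
    self.assertNotEmpty(metrics)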


if __name__ == '__main__':
  tf.test.main()


# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test that Mask RCNN is deterministic when TF determinism is enabled."""
# pylint: disable=unused-import
from absl.testing import parameterized
import orbit
import tensorflow as tf
from official.core import exp_factory
from official.modeling import optimization
from official.vision.tasks import maskrcnn


class MaskRcnnTaskTest(parameterized.TestCase, tf.test.TestCase):
def _edit_config_for_testing(self, config):
    # Modify the config so the test is small enough to run locally.
config.trainer.steps_per_loop = 1
config.task.train_data.global_batch_size = 2
config.task.model.backbone.resnet.model_id = 18
config.task.model.decoder.fpn.num_filters = 32
config.task.model.detection_generator.pre_nms_top_k = 500
config.task.model.detection_head.fc_dims = 128
if config.task.model.include_mask:
config.task.model.mask_sampler.num_sampled_masks = 10
config.task.model.mask_head.num_convs = 1
config.task.model.roi_generator.num_proposals = 100
config.task.model.roi_generator.pre_nms_top_k = 150
config.task.model.roi_generator.test_pre_nms_top_k = 150
config.task.model.roi_generator.test_num_proposals = 100
config.task.model.rpn_head.num_filters = 32
config.task.model.roi_sampler.num_sampled_rois = 200
config.task.model.input_size = [128, 128, 3]
config.trainer.train_steps = 2
config.task.train_data.shuffle_buffer_size = 2
config.task.train_data.input_path = "coco/train-00000-of-00256.tfrecord"
config.task.validation_data.global_batch_size = 2
config.task.validation_data.input_path = "coco/val-00000-of-00032.tfrecord"
def _build_and_run_model(self, config):
task = maskrcnn.MaskRCNNTask(config.task)
model = task.build_model()
train_metrics = task.build_metrics(training=True)
validation_metrics = task.build_metrics(training=False)
strategy = tf.distribute.get_strategy()
train_dataset = orbit.utils.make_distributed_dataset(
strategy, task.build_inputs, config.task.train_data)
train_iterator = iter(train_dataset)
validation_dataset = orbit.utils.make_distributed_dataset(
strategy, task.build_inputs, config.task.validation_data)
validation_iterator = iter(validation_dataset)
opt_factory = optimization.OptimizerFactory(config.trainer.optimizer_config)
optimizer = opt_factory.build_optimizer(opt_factory.build_learning_rate())
# Run training
logs = task.train_step(next(train_iterator), model, optimizer,
metrics=train_metrics)
for metric in train_metrics:
logs[metric.name] = metric.result()
# Run validation
validation_logs = task.validation_step(next(validation_iterator), model,
metrics=validation_metrics)
for metric in validation_metrics:
validation_logs[metric.name] = metric.result()
return logs, validation_logs, model.weights
@parameterized.parameters(
"fasterrcnn_resnetfpn_coco",
"maskrcnn_resnetfpn_coco",
"maskrcnn_spinenet_coco",
"cascadercnn_spinenet_coco",
)
def test_maskrcnn_task_train(self, test_config):
"""RetinaNet task test for training and val using toy configs."""
config = exp_factory.get_exp_config(test_config)
self._edit_config_for_testing(config)
tf.keras.utils.set_random_seed(1)
logs1, validation_logs1, weights1 = self._build_and_run_model(config)
tf.keras.utils.set_random_seed(1)
logs2, validation_logs2, weights2 = self._build_and_run_model(config)
self.assertAllEqual(logs1["loss"], logs2["loss"])
self.assertAllEqual(logs1["total_loss"], logs2["total_loss"])
self.assertAllEqual(logs1["loss"], logs2["loss"])
self.assertAllEqual(validation_logs1["coco_metric"][1]["detection_boxes"],
validation_logs2["coco_metric"][1]["detection_boxes"])
self.assertAllEqual(validation_logs1["coco_metric"][1]["detection_scores"],
validation_logs2["coco_metric"][1]["detection_scores"])
self.assertAllEqual(validation_logs1["coco_metric"][1]["detection_classes"],
validation_logs2["coco_metric"][1]["detection_classes"])
for weight1, weight2 in zip(weights1, weights2):
self.assertAllEqual(weight1, weight2)
if __name__ == "__main__":
tf.config.experimental.enable_op_determinism()
tf.test.main()