Commit 2bceb1b7 authored by Yeqing Li's avatar Yeqing Li Committed by A. Unique TensorFlower
Browse files

Move spatial_transforms to ops folder.

PiperOrigin-RevId: 275940140
parent bd0d5692
...@@ -28,7 +28,7 @@ import tensorflow.compat.v2 as tf ...@@ -28,7 +28,7 @@ import tensorflow.compat.v2 as tf
from tensorflow.python.keras import backend from tensorflow.python.keras import backend
from official.vision.detection.modeling.architecture import nn_ops from official.vision.detection.modeling.architecture import nn_ops
from official.vision.detection.utils import spatial_transform from official.vision.detection.ops import spatial_transform_ops
class Fpn(object): class Fpn(object):
...@@ -91,7 +91,7 @@ class Fpn(object): ...@@ -91,7 +91,7 @@ class Fpn(object):
# Adds top-down path. # Adds top-down path.
feats = {backbone_max_level: feats_lateral[backbone_max_level]} feats = {backbone_max_level: feats_lateral[backbone_max_level]}
for level in range(backbone_max_level - 1, self._min_level - 1, -1): for level in range(backbone_max_level - 1, self._min_level - 1, -1):
feats[level] = spatial_transform.nearest_upsampling( feats[level] = spatial_transform_ops.nearest_upsampling(
feats[level + 1], 2) + feats_lateral[level] feats[level + 1], 2) + feats_lateral[level]
# Adds post-hoc 3x3 convolution kernel. # Adds post-hoc 3x3 convolution kernel.
......
...@@ -25,7 +25,7 @@ import numpy as np ...@@ -25,7 +25,7 @@ import numpy as np
import tensorflow.compat.v2 as tf import tensorflow.compat.v2 as tf
from tensorflow.python.keras import backend from tensorflow.python.keras import backend
from official.vision.detection.modeling.architecture import nn_ops from official.vision.detection.modeling.architecture import nn_ops
from official.vision.detection.utils import spatial_transform from official.vision.detection.ops import spatial_transform_ops
class RpnHead(object): class RpnHead(object):
...@@ -542,7 +542,7 @@ class ShapemaskPriorHead(object): ...@@ -542,7 +542,7 @@ class ShapemaskPriorHead(object):
level_outer_boxes = outer_boxes / tf.pow( level_outer_boxes = outer_boxes / tf.pow(
2., tf.expand_dims(detection_prior_levels, -1)) 2., tf.expand_dims(detection_prior_levels, -1))
detection_prior_levels = tf.cast(detection_prior_levels, tf.int32) detection_prior_levels = tf.cast(detection_prior_levels, tf.int32)
uniform_priors = spatial_transform.crop_mask_in_target_box( uniform_priors = spatial_transform_ops.crop_mask_in_target_box(
tf.ones([ tf.ones([
batch_size, self._num_of_instances, self._mask_crop_size, batch_size, self._num_of_instances, self._mask_crop_size,
self._mask_crop_size self._mask_crop_size
...@@ -550,7 +550,7 @@ class ShapemaskPriorHead(object): ...@@ -550,7 +550,7 @@ class ShapemaskPriorHead(object):
# Prepare crop features. # Prepare crop features.
multi_level_features = self._get_multilevel_features(fpn_features) multi_level_features = self._get_multilevel_features(fpn_features)
crop_features = spatial_transform.single_level_feature_crop( crop_features = spatial_transform_ops.single_level_feature_crop(
multi_level_features, level_outer_boxes, detection_prior_levels, multi_level_features, level_outer_boxes, detection_prior_levels,
self._min_mask_level, self._mask_crop_size) self._min_mask_level, self._mask_crop_size)
...@@ -562,7 +562,7 @@ class ShapemaskPriorHead(object): ...@@ -562,7 +562,7 @@ class ShapemaskPriorHead(object):
batch_size, self._num_of_instances, self._mask_crop_size, batch_size, self._num_of_instances, self._mask_crop_size,
self._mask_crop_size self._mask_crop_size
]) ])
predicted_detection_priors = spatial_transform.crop_mask_in_target_box( predicted_detection_priors = spatial_transform_ops.crop_mask_in_target_box(
fused_shape_priors, boxes, outer_boxes, self._mask_crop_size) fused_shape_priors, boxes, outer_boxes, self._mask_crop_size)
predicted_detection_priors = tf.reshape( predicted_detection_priors = tf.reshape(
predicted_detection_priors, predicted_detection_priors,
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment