Commit 6d828342 authored by Kaushik Shivakumar

clean up hopefully for the last time :)

parent 6894a660
@@ -218,9 +218,7 @@ def compute_box_context_attention(box_features, context_features,
                                     attention_temperature, valid_mask,
                                     is_training)
-  print("before expansion", output_features.shape)
   # Expands the dimension back to match with the original feature map.
   output_features = output_features[:, :, tf.newaxis, tf.newaxis, :]
-  print("after expansion", output_features.shape)
   return output_features
\ No newline at end of file
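An aside on the surviving expansion line: the attention output is one feature vector per box, and the tf.newaxis indexing restores singleton spatial dimensions so it can broadcast against the cropped feature map. A minimal runnable sketch (the shapes below are illustrative assumptions, not values from the repo):

```python
import tensorflow as tf

# Attention output: one feature vector per box, no spatial dims.
output_features = tf.zeros([2, 8, 256])  # [batch, num_boxes, channels]

# Insert singleton height/width axes so the result can later be added to a
# [batch, num_boxes, height, width, channels] cropped feature map.
expanded = output_features[:, :, tf.newaxis, tf.newaxis, :]
print(expanded.shape)  # (2, 8, 1, 1, 256)
```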
@@ -70,12 +70,8 @@ class AttentionBlock(tf.keras.layers.Layer):
     pass
 
   def call(self, input_features, is_training, valid_context_size):
-    """Handles a call by performing attention"""
-    print("CALLED")
+    """Handles a call by performing attention."""
     input_features, context_features = input_features
-    print(input_features.shape)
-    print(context_features.shape)
     _, context_size, _ = context_features.shape
     valid_mask = compute_valid_mask(valid_context_size, context_size)
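compute_valid_mask is called here but defined outside this hunk. As a sketch of what such a helper plausibly does, assuming each image's context rows are zero-padded up to context_size (the signature and body below are a reconstruction for illustration, not quoted from the repo):

```python
import tensorflow as tf

def compute_valid_mask(num_valid_elements, num_elements):
  """Sketch: boolean mask of shape [batch, num_elements] that flags which
  padded context rows are real for each image."""
  batch_size = num_valid_elements.shape[0]
  element_idxs = tf.range(num_elements, dtype=tf.int32)
  # One row of indices per batch element.
  batch_element_idxs = tf.tile(element_idxs[tf.newaxis, ...], [batch_size, 1])
  # A row index is valid when it is below that image's valid count.
  valid_mask = tf.less(batch_element_idxs,
                       num_valid_elements[..., tf.newaxis])
  return valid_mask

# Example: batch of 2, padded context length 4; first image has 2 valid rows.
print(compute_valid_mask(tf.constant([2, 4]), 4))
```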
@@ -116,7 +112,6 @@ class AttentionBlock(tf.keras.layers.Layer):
     output_features = output_features[:, :, tf.newaxis, tf.newaxis, :]
-    print(output_features.shape)
     return output_features
@@ -178,9 +173,9 @@ def project_features(features, bottleneck_dimension, is_training,
   Args:
     features: A float Tensor of shape [batch_size, features_size,
       num_features].
-    projection_dimension: A int32 Tensor.
+    bottleneck_dimension: A int32 Tensor.
     is_training: A boolean Tensor (affecting batch normalization).
-    node: Contains a custom layer specific to the particular operation
+    layer: Contains a custom layer specific to the particular operation
       being performed (key, value, query, features)
     normalize: A boolean Tensor. If true, the output features will be l2
       normalized on the last dimension.
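The docstring above implies project_features pushes each feature vector through a caller-supplied projection layer and optionally l2-normalizes the result. A minimal sketch under those assumptions (batch-norm handling via is_training is elided, and the body is illustrative rather than the repo's implementation):

```python
import tensorflow as tf

def project_features(features, bottleneck_dimension, is_training, layer,
                     normalize=True):
  """Sketch: projects [batch_size, features_size, num_features] features
  down to bottleneck_dimension with a caller-supplied layer."""
  del is_training  # Batch normalization is elided in this sketch.
  batch_size, _, num_features = features.shape
  # Flatten so the projection layer sees one vector per row.
  features = tf.reshape(features, [-1, num_features])
  projected_features = layer(features)
  projected_features = tf.reshape(
      projected_features, [batch_size, -1, bottleneck_dimension])
  if normalize:
    projected_features = tf.math.l2_normalize(projected_features, axis=-1)
  return projected_features

# Usage: a dense layer serves as the "query"/"key"/"value" projection.
dense = tf.keras.layers.Dense(16)
out = project_features(tf.zeros([2, 10, 32]), 16, False, dense)
print(out.shape)  # (2, 10, 16)
```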
@@ -29,7 +29,6 @@ from object_detection.core import standard_fields as fields
 from object_detection.meta_architectures import context_rcnn_lib, context_rcnn_lib_v2
 from object_detection.meta_architectures import faster_rcnn_meta_arch
 from object_detection.utils import tf_version
-import tensorflow as tf
 
 class ContextRCNNMetaArch(faster_rcnn_meta_arch.FasterRCNNMetaArch):
   """Context R-CNN Meta-architecture definition."""
@@ -214,7 +213,6 @@ class ContextRCNNMetaArch(faster_rcnn_meta_arch.FasterRCNNMetaArch):
       attention_bottleneck_dimension: A single integer. The bottleneck feature
         dimension of the attention block.
       attention_temperature: A single float. The attention temperature.
-      attention_projection_layers:
 
     Raises:
       ValueError: If `second_stage_batch_size` > `first_stage_max_proposals` at
@@ -268,13 +266,14 @@ class ContextRCNNMetaArch(faster_rcnn_meta_arch.FasterRCNNMetaArch):
     if tf_version.is_tf1():
       self._context_feature_extract_fn = functools.partial(
-        context_rcnn_lib.compute_box_context_attention,
-        bottleneck_dimension=attention_bottleneck_dimension,
-        attention_temperature=attention_temperature,
-        is_training=is_training)
+          context_rcnn_lib.compute_box_context_attention,
+          bottleneck_dimension=attention_bottleneck_dimension,
+          attention_temperature=attention_temperature,
+          is_training=is_training)
     else:
       self._attention_block = context_rcnn_lib_v2.AttentionBlock(
-          attention_bottleneck_dimension, attention_temperature, freeze_batchnorm)
+          attention_bottleneck_dimension, attention_temperature,
+          freeze_batchnorm)
     self._is_training = is_training
 
   @staticmethod
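On the TF1 branch above: functools.partial freezes the attention configuration at construction time, so the stored callable only needs the runtime tensors. A tiny self-contained illustration with placeholder names and values:

```python
import functools

def compute_attention(box_features, context_features, bottleneck_dimension,
                      attention_temperature, is_training):
  # Placeholder body; the real function lives in context_rcnn_lib.
  return (bottleneck_dimension, attention_temperature, is_training)

# Configuration is bound once when the meta-arch is constructed...
extract_fn = functools.partial(
    compute_attention,
    bottleneck_dimension=128,
    attention_temperature=0.01,
    is_training=True)

# ...so call sites pass only the per-batch tensors.
print(extract_fn("boxes", "context"))  # (128, 0.01, True)
```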
@@ -331,7 +330,6 @@ class ContextRCNNMetaArch(faster_rcnn_meta_arch.FasterRCNNMetaArch):
     Returns:
       A float32 Tensor with shape [K, new_height, new_width, depth].
     """
-    print("INSIDE META ARCH")
     box_features = self._crop_and_resize_fn(
         features_to_crop, proposal_boxes_normalized,
@@ -343,16 +341,12 @@ class ContextRCNNMetaArch(faster_rcnn_meta_arch.FasterRCNNMetaArch):
           context_features=context_features,
           valid_context_size=valid_context_size)
     else:
-      print("CALLING ATTENTION")
-      attention_features = self._attention_block([box_features, context_features], self._is_training, valid_context_size)
-      print(attention_features.shape)
+      attention_features = self._attention_block(
+          [box_features, context_features], self._is_training,
+          valid_context_size)
     # Adds box features with attention features.
-    print("box", box_features.shape)
-    print("attention", attention_features.shape)
     box_features += attention_features
-    print("after adding", box_features.shape)
     flattened_feature_maps = self._flatten_first_two_dimensions(box_features)
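The `box_features += attention_features` line relies on broadcasting: the attention tensor has singleton spatial dimensions (from the earlier tf.newaxis expansion) while the cropped box features have real height and width. A quick shape check with assumed sizes:

```python
import tensorflow as tf

# Cropped ROI features and the per-box attention output (shapes assumed).
box_features = tf.zeros([2, 8, 7, 7, 256])        # [batch, boxes, h, w, c]
attention_features = tf.zeros([2, 8, 1, 1, 256])  # singleton spatial dims

# Broadcasting adds the same attention vector at every spatial position.
box_features += attention_features
print(box_features.shape)  # (2, 8, 7, 7, 256)
```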