- """
- return {
- k: tf.compat.v1.saved_model.utils.build_tensor_info(t)
- for k, t in tensor_dict.items()
- }
-
-
-def main(argv):
- if len(argv) > 1:
- raise app.UsageError('Too many command-line arguments.')
-
- export_path = FLAGS.export_path
- if os.path.exists(export_path):
- raise ValueError('Export_path already exists.')
-
- with tf.Graph().as_default() as g, tf.compat.v1.Session(graph=g) as sess:
-
+class _ExtractModule(tf.Module):
+ """Helper module to build and save DELF model."""
+
+ def __init__(self, block3_strides, iou):
+ """Initialization of DELF model.
+
+ Args:
+ block3_strides: bool, whether to add strides to the output of block3.
+ iou: IOU for non-max suppression.
+ """
+ self._stride_factor = 2.0 if block3_strides else 1.0
+ self._iou = iou
# Setup the DELF model for extraction.
- model = delf_model.Delf(block3_strides=FLAGS.block3_strides, name='DELF')
-
- # Initial forward pass to build model.
- images = tf.zeros((1, 321, 321, 3), dtype=tf.float32)
- model(images)
+ self._model = delf_model.Delf(
+ block3_strides=block3_strides, name='DELF')
- stride_factor = 2.0 if FLAGS.block3_strides else 1.0
+ def LoadWeights(self, checkpoint_path):
+ self._model.load_weights(checkpoint_path)
- # Setup the multiscale keypoint extraction.
- input_image = tf.compat.v1.placeholder(
- tf.uint8, shape=(None, None, 3), name='input_image')
- input_abs_thres = tf.compat.v1.placeholder(
- tf.float32, shape=(), name='input_abs_thres')
- input_scales = tf.compat.v1.placeholder(
- tf.float32, shape=[None], name='input_scales')
- input_max_feature_num = tf.compat.v1.placeholder(
- tf.int32, shape=(), name='input_max_feature_num')
+ @tf.function(input_signature=[
+ tf.TensorSpec(shape=[None, None, 3], dtype=tf.uint8, name='input_image'),
+ tf.TensorSpec(shape=[None], dtype=tf.float32, name='input_scales'),
+ tf.TensorSpec(shape=(), dtype=tf.int32, name='input_max_feature_num'),
+ tf.TensorSpec(shape=(), dtype=tf.float32, name='input_abs_thres')
+ ])
+ def ExtractFeatures(self, input_image, input_scales, input_max_feature_num,
+ input_abs_thres):
extracted_features = export_model_utils.ExtractLocalFeatures(
input_image, input_scales, input_max_feature_num, input_abs_thres,
- FLAGS.iou, lambda x: model(x, training=False), stride_factor)
+ self._iou, lambda x: self._model(x, training=False),
+ self._stride_factor)
- # Load the weights.
- checkpoint_path = FLAGS.ckpt_path
- model.load_weights(checkpoint_path)
- print('Checkpoint loaded from ', checkpoint_path)
-
- named_input_tensors = {
- 'input_image': input_image,
- 'input_scales': input_scales,
- 'input_abs_thres': input_abs_thres,
- 'input_max_feature_num': input_max_feature_num,
- }
-
- # Outputs to the exported model.
named_output_tensors = {}
named_output_tensors['boxes'] = tf.identity(
extracted_features[0], name='boxes')
@@ -112,25 +84,27 @@ def main(argv):
extracted_features[2], name='scales')
named_output_tensors['scores'] = tf.identity(
extracted_features[3], name='scores')
+ return named_output_tensors
+
+
+def main(argv):
+ if len(argv) > 1:
+ raise app.UsageError('Too many command-line arguments.')
+
+ export_path = FLAGS.export_path
+ if os.path.exists(export_path):
+    raise ValueError(f'export_path {export_path} already exists. Please '
+ 'specify a different path or delete the existing one.')
+
+ module = _ExtractModule(FLAGS.block3_strides, FLAGS.iou)
+
+ # Load the weights.
+ checkpoint_path = FLAGS.ckpt_path
+ module.LoadWeights(checkpoint_path)
+ print('Checkpoint loaded from ', checkpoint_path)
- # Export the model.
- signature_def = tf.compat.v1.saved_model.signature_def_utils.build_signature_def(
- inputs=_build_tensor_info(named_input_tensors),
- outputs=_build_tensor_info(named_output_tensors))
-
- print('Exporting trained model to:', export_path)
- builder = tf.compat.v1.saved_model.builder.SavedModelBuilder(export_path)
-
- init_op = None
- builder.add_meta_graph_and_variables(
- sess, [tf.compat.v1.saved_model.tag_constants.SERVING],
- signature_def_map={
- tf.compat.v1.saved_model.signature_constants
- .DEFAULT_SERVING_SIGNATURE_DEF_KEY:
- signature_def
- },
- main_op=init_op)
- builder.save()
+ # Save the module
+ tf.saved_model.save(module, export_path)
if __name__ == '__main__':
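
The TF1 `SavedModelBuilder` export above is replaced by `tf.saved_model.save` on a `tf.Module` whose `ExtractFeatures` method carries an explicit input signature. Below is a minimal sketch (not part of the patch) of how the exported module can be loaded and called; the export path, image, and threshold values are illustrative placeholders.

```python
# Sketch: load the SavedModel written by tf.saved_model.save above and run
# local-feature extraction. Paths and parameter values are placeholders.
import numpy as np
import tensorflow as tf

delf = tf.saved_model.load('/tmp/delf_local_features')  # hypothetical export_path

image = tf.constant(
    np.random.randint(0, 256, size=(480, 640, 3), dtype=np.uint8))
outputs = delf.ExtractFeatures(
    image,
    tf.constant([0.7071, 1.0, 1.4142], dtype=tf.float32),  # input_scales
    tf.constant(1000, dtype=tf.int32),                      # input_max_feature_num
    tf.constant(1.5, dtype=tf.float32))                     # input_abs_thres

# The method returns the named_output_tensors dict built above.
print(outputs['boxes'].shape, outputs['scores'].shape)
```
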
diff --git a/research/delf/delf/python/training/model/export_model_utils.py b/research/delf/delf/python/training/model/export_model_utils.py
index f4302aca139802e99d80bfd4e1fc27e353abdfbb..3fc18a3c280de513070f6c09612506db37f9db1a 100644
--- a/research/delf/delf/python/training/model/export_model_utils.py
+++ b/research/delf/delf/python/training/model/export_model_utils.py
@@ -142,20 +142,21 @@ def ExtractLocalFeatures(image, image_scales, max_feature_num, abs_thres, iou,
keep_going = lambda j, b, f, scales, scores: tf.less(j, num_scales)
(_, output_boxes, output_features, output_scales,
- output_scores) = tf.while_loop(
- cond=keep_going,
- body=_ProcessSingleScale,
- loop_vars=[
- i, output_boxes, output_features, output_scales, output_scores
- ],
- shape_invariants=[
- i.get_shape(),
- tf.TensorShape([None, 4]),
- tf.TensorShape([None, feature_depth]),
- tf.TensorShape([None]),
- tf.TensorShape([None])
- ],
- back_prop=False)
+ output_scores) = tf.nest.map_structure(
+ tf.stop_gradient,
+ tf.while_loop(
+ cond=keep_going,
+ body=_ProcessSingleScale,
+ loop_vars=[
+ i, output_boxes, output_features, output_scales, output_scores
+ ],
+ shape_invariants=[
+ i.get_shape(),
+ tf.TensorShape([None, 4]),
+ tf.TensorShape([None, feature_depth]),
+ tf.TensorShape([None]),
+ tf.TensorShape([None])
+ ]))
feature_boxes = box_list.BoxList(output_boxes)
feature_boxes.add_field('features', output_features)
@@ -169,3 +170,109 @@ def ExtractLocalFeatures(image, image_scales, max_feature_num, abs_thres, iou,
return final_boxes.get(), final_boxes.get_field(
'features'), final_boxes.get_field('scales'), tf.expand_dims(
final_boxes.get_field('scores'), 1)
+
+
+def ExtractGlobalFeatures(image,
+ image_scales,
+ model_fn,
+ multi_scale_pool_type='None',
+ normalize_global_descriptor=False):
+ """Extract global features for input image.
+
+ Args:
+ image: image tensor of type tf.uint8 with shape [h, w, channels].
+ image_scales: 1D float tensor which contains float scales used for image
+ pyramid construction.
+ model_fn: model function. Follows the signature:
+ * Args:
+ * `images`: Image tensor which is re-scaled.
+ * Returns:
+ * `global_descriptors`: Global descriptors for input images.
+ multi_scale_pool_type: If set, the global descriptor of each scale is pooled
+ and a 1D global descriptor is returned.
+ normalize_global_descriptor: If True, output global descriptors are
+ L2-normalized.
+
+ Returns:
+ global_descriptors: If `multi_scale_pool_type` is 'None', returns a [S, D]
+ float tensor. S is the number of scales, and D the global descriptor
+ dimensionality. Each D-dimensional entry is a global descriptor, which may
+ be L2-normalized depending on `normalize_global_descriptor`. If
+ `multi_scale_pool_type` is not 'None', returns a [D] float tensor with the
+ pooled global descriptor.
+
+ """
+ original_image_shape_float = tf.gather(
+ tf.dtypes.cast(tf.shape(image), tf.float32), [0, 1])
+
+ image_tensor = gld.NormalizeImages(
+ image, pixel_value_offset=128.0, pixel_value_scale=128.0)
+ image_tensor = tf.expand_dims(image_tensor, 0, name='image/expand_dims')
+
+ def _ProcessSingleScale(scale_index, global_descriptors=None):
+ """Resizes the image and runs feature extraction.
+
+ This function will be passed into tf.while_loop() and be called
+ repeatedly. We get the current scale by image_scales[scale_index], and
+ run image resizing / feature extraction. In the end, we concat the
+ previous global descriptors with current descriptor as the output.
+
+ Args:
+ scale_index: A valid index in image_scales.
+ global_descriptors: Global descriptor tensor with the shape of [S, D]. If
+ None, no previous global descriptors are used, and the output will be of
+ shape [1, D].
+
+ Returns:
+ scale_index: The next scale index for processing.
+ global_descriptors: A concatenated global descriptor tensor with the shape
+ of [S+1, D].
+ """
+ scale = tf.gather(image_scales, scale_index)
+ new_image_size = tf.dtypes.cast(
+ tf.round(original_image_shape_float * scale), tf.int32)
+ resized_image = tf.image.resize(image_tensor, new_image_size)
+
+ global_descriptor = model_fn(resized_image)
+ if global_descriptors is None:
+ global_descriptors = global_descriptor
+ else:
+ global_descriptors = tf.concat([global_descriptors, global_descriptor], 0)
+
+ return scale_index + 1, global_descriptors
+
+ # Process the first scale separately, the following scales will reuse the
+ # graph variables.
+ (_, output_global) = _ProcessSingleScale(0)
+
+ i = tf.constant(1, dtype=tf.int32)
+ num_scales = tf.shape(image_scales)[0]
+ keep_going = lambda j, g: tf.less(j, num_scales)
+
+ (_, output_global) = tf.nest.map_structure(
+ tf.stop_gradient,
+ tf.while_loop(
+ cond=keep_going,
+ body=_ProcessSingleScale,
+ loop_vars=[i, output_global],
+ shape_invariants=[i.get_shape(),
+ tf.TensorShape([None, None])]))
+
+ normalization_axis = 1
+ if multi_scale_pool_type == 'average':
+ output_global = tf.reduce_mean(
+ output_global,
+ axis=0,
+ keepdims=False,
+ name='multi_scale_average_pooling')
+ normalization_axis = 0
+ elif multi_scale_pool_type == 'sum':
+ output_global = tf.reduce_sum(
+ output_global, axis=0, keepdims=False, name='multi_scale_sum_pooling')
+ normalization_axis = 0
+
+ if normalize_global_descriptor:
+ output_global = tf.nn.l2_normalize(
+ output_global, axis=normalization_axis, name='l2_normalization')
+
+ return output_global
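
Both hunks in this file replace the TF1-only `back_prop=False` argument of `tf.while_loop` with `tf.nest.map_structure(tf.stop_gradient, tf.while_loop(...))`, which reproduces the "no gradients through the loop" behavior in TF2. A toy, self-contained sketch of that pattern (the loop body is illustrative, not the DELF feature loop):

```python
# Toy sketch of the pattern used above: run a while_loop, then block gradients
# through all loop outputs with tf.nest.map_structure(tf.stop_gradient, ...).
import tensorflow as tf

def cumulative_sum_no_grad(values):
  i0 = tf.constant(0)
  acc0 = tf.zeros_like(values[0])

  def body(i, acc):
    return [i + 1, acc + values[i]]

  cond = lambda i, acc: i < tf.shape(values)[0]
  # Equivalent in spirit to the removed `back_prop=False`.
  _, total = tf.nest.map_structure(
      tf.stop_gradient,
      tf.while_loop(cond=cond, body=body, loop_vars=[i0, acc0]))
  return total

x = tf.Variable([1.0, 2.0, 3.0])
with tf.GradientTape() as tape:
  y = cumulative_sum_no_grad(x)
print(y.numpy(), tape.gradient(y, x))  # 6.0 None
```
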
diff --git a/research/delf/delf/python/training/model/resnet50.py b/research/delf/delf/python/training/model/resnet50.py
index 1c4d7c2f68dea12d74fcd32a8b52fd1285e92b59..6daaab67419d99ebcefd7b25f89c284bf00832af 100644
--- a/research/delf/delf/python/training/model/resnet50.py
+++ b/research/delf/delf/python/training/model/resnet50.py
@@ -22,9 +22,14 @@ from __future__ import division
from __future__ import print_function
import functools
+import os
+import tempfile
+from absl import logging
+import h5py
import tensorflow as tf
+
layers = tf.keras.layers
@@ -284,8 +289,8 @@ class ResNet50(tf.keras.Model):
else:
self.global_pooling = None
- def call(self, inputs, training=True, intermediates_dict=None):
- """Call the ResNet50 model.
+ def build_call(self, inputs, training=True, intermediates_dict=None):
+ """Building the ResNet50 model.
Args:
inputs: Images to compute features for.
@@ -356,3 +361,79 @@ class ResNet50(tf.keras.Model):
return self.global_pooling(x)
else:
return x
+
+ def call(self, inputs, training=True, intermediates_dict=None):
+ """Call the ResNet50 model.
+
+ Args:
+ inputs: Images to compute features for.
+ training: Whether model is in training phase.
+ intermediates_dict: `None` or dictionary. If not None, accumulate feature
+        maps from intermediate blocks into the dictionary.
+
+ Returns:
+ Tensor with featuremap.
+ """
+ return self.build_call(inputs, training, intermediates_dict)
+
+ def restore_weights(self, filepath):
+ """Load pretrained weights.
+
+ This function loads a .h5 file from the filepath with saved model weights
+ and assigns them to the model.
+
+ Args:
+      filepath: String, path to the .h5 file.
+ Raises:
+ ValueError: if the file referenced by `filepath` does not exist.
+ """
+ if not tf.io.gfile.exists(filepath):
+      raise ValueError('Unable to load weights from %s. You must provide a '
+ 'valid file.' % (filepath))
+
+ # Create a local copy of the weights file for h5py to be able to read it.
+ local_filename = os.path.basename(filepath)
+ tmp_filename = os.path.join(tempfile.gettempdir(), local_filename)
+ tf.io.gfile.copy(filepath, tmp_filename, overwrite=True)
+
+ # Load the content of the weights file.
+ f = h5py.File(tmp_filename, mode='r')
+ saved_layer_names = [n.decode('utf8') for n in f.attrs['layer_names']]
+
+ try:
+ # Iterate through all the layers assuming the max `depth` is 2.
+ for layer in self.layers:
+ if hasattr(layer, 'layers'):
+ for inlayer in layer.layers:
+ # Make sure the weights are in the saved model, and that we are in
+ # the innermost layer.
+ if inlayer.name not in saved_layer_names:
+            raise ValueError('Layer %s absent from the pretrained weights. '
+ 'Unable to load its weights.' % (inlayer.name))
+ if hasattr(inlayer, 'layers'):
+            raise ValueError('Layer %s is not a depth 2 layer. Unable to load '
+ 'its weights.' % (inlayer.name))
+ # Assign the weights in the current layer.
+ g = f[inlayer.name]
+ weight_names = [n.decode('utf8') for n in g.attrs['weight_names']]
+ weight_values = [g[weight_name] for weight_name in weight_names]
+ print('Setting the weights for layer %s' % (inlayer.name))
+ inlayer.set_weights(weight_values)
+ finally:
+ # Clean up the temporary file.
+ tf.io.gfile.remove(tmp_filename)
+
+ def log_weights(self):
+ """Log backbone weights."""
+ logging.info('Logging backbone weights')
+ logging.info('------------------------')
+ for layer in self.layers:
+ if hasattr(layer, 'layers'):
+ for inlayer in layer.layers:
+          logging.info('Weights for layer: %s, inlayer %s', layer.name,
+ inlayer.name)
+ weights = inlayer.get_weights()
+ logging.info(weights)
+ else:
+ logging.info('Layer %s does not have inner layers.',
+ layer.name)
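
`restore_weights` above walks the standard Keras HDF5 layout: a `layer_names` attribute at the file root and a `weight_names` attribute per layer group. A hedged sketch of inspecting such a file directly with h5py (the path is a placeholder; newer h5py versions may return `str` rather than `bytes` for these attributes, hence the conditional decode):

```python
# Sketch: list layers and stored weight shapes in a Keras .h5 weights file,
# mirroring the traversal in restore_weights above. The path is a placeholder.
import h5py

def _names(attr):
  return [n.decode('utf8') if isinstance(n, bytes) else n for n in attr]

with h5py.File('/tmp/resnet50_imagenet_weights.h5', mode='r') as f:
  for layer_name in _names(f.attrs['layer_names']):
    g = f[layer_name]
    for weight_name in _names(g.attrs['weight_names']):
      print(layer_name, weight_name, g[weight_name].shape)
```
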
diff --git a/research/delf/delf/python/training/train.py b/research/delf/delf/python/training/train.py
index dcf61b3f35a8e9f580b7f9f143fbe2281172de04..12b7a5f9cc3282e59c738f74c7fbd4798021c429 100644
--- a/research/delf/delf/python/training/train.py
+++ b/research/delf/delf/python/training/train.py
@@ -43,13 +43,20 @@ flags.DEFINE_string('train_file_pattern', '/tmp/data/train*',
'File pattern of training dataset files.')
flags.DEFINE_string('validation_file_pattern', '/tmp/data/validation*',
'File pattern of validation dataset files.')
+flags.DEFINE_enum(
+ 'dataset_version', 'gld_v1', ['gld_v1', 'gld_v2', 'gld_v2_clean'],
+    'Google Landmarks dataset version, used to determine the '
+ 'number of classes.')
flags.DEFINE_integer('seed', 0, 'Seed to training dataset.')
-flags.DEFINE_float('initial_lr', 0.001, 'Initial learning rate.')
+flags.DEFINE_float('initial_lr', 0.01, 'Initial learning rate.')
flags.DEFINE_integer('batch_size', 32, 'Global batch size.')
flags.DEFINE_integer('max_iters', 500000, 'Maximum iterations.')
-flags.DEFINE_boolean('block3_strides', False, 'Whether to use block3_strides.')
+flags.DEFINE_boolean('block3_strides', True, 'Whether to use block3_strides.')
flags.DEFINE_boolean('use_augmentation', True,
'Whether to use ImageNet style augmentation.')
+flags.DEFINE_string(
+ 'imagenet_checkpoint', None,
+ 'ImageNet checkpoint for ResNet backbone. If None, no checkpoint is used.')
def _record_accuracy(metric, logits, labels):
@@ -60,6 +67,10 @@ def _record_accuracy(metric, logits, labels):
def _attention_summaries(scores, global_step):
"""Record statistics of the attention score."""
+ tf.summary.image(
+ 'batch_attention',
+ scores / tf.reduce_max(scores + 1e-3),
+ step=global_step)
tf.summary.scalar('attention/max', tf.reduce_max(scores), step=global_step)
tf.summary.scalar('attention/min', tf.reduce_min(scores), step=global_step)
tf.summary.scalar('attention/mean', tf.reduce_mean(scores), step=global_step)
@@ -120,7 +131,7 @@ def main(argv):
max_iters = FLAGS.max_iters
global_batch_size = FLAGS.batch_size
image_size = 321
- num_eval = 1000
+ num_eval_batches = int(50000 / global_batch_size)
report_interval = 100
eval_interval = 1000
save_interval = 20000
@@ -130,15 +141,16 @@ def main(argv):
clip_val = tf.constant(10.0)
if FLAGS.debug:
+ tf.config.run_functions_eagerly(True)
global_batch_size = 4
- max_iters = 4
- num_eval = 1
+ max_iters = 100
+ num_eval_batches = 1
save_interval = 1
report_interval = 1
- # TODO(andrearaujo): Using placeholder, replace with actual value using
- # GoogleLandmarksInfo() from datasets/googlelandmarks.py.
- num_classes = 14951
+ # Determine the number of classes based on the version of the dataset.
+ gld_info = gld.GoogleLandmarksInfo()
+ num_classes = gld_info.num_classes[FLAGS.dataset_version]
# ------------------------------------------------------------
# Create the distributed train/validation sets.
@@ -155,11 +167,12 @@ def main(argv):
augmentation=False,
seed=FLAGS.seed)
- train_iterator = strategy.make_dataset_iterator(train_dataset)
- validation_iterator = strategy.make_dataset_iterator(validation_dataset)
+ train_dist_dataset = strategy.experimental_distribute_dataset(train_dataset)
+ validation_dist_dataset = strategy.experimental_distribute_dataset(
+ validation_dataset)
- train_iterator.initialize()
- validation_iterator.initialize()
+ train_iter = iter(train_dist_dataset)
+ validation_iter = iter(validation_dist_dataset)
# Create a checkpoint directory to store the checkpoints.
checkpoint_prefix = os.path.join(FLAGS.logdir, 'delf_tf2-ckpt')
@@ -215,11 +228,14 @@ def main(argv):
labels = tf.clip_by_value(labels, 0, model.num_classes)
global_step = optimizer.iterations
+ tf.summary.image('batch_images', (images + 1.0) / 2.0, step=global_step)
tf.summary.scalar(
'image_range/max', tf.reduce_max(images), step=global_step)
tf.summary.scalar(
'image_range/min', tf.reduce_min(images), step=global_step)
+ # TODO(andrearaujo): we should try to unify the backprop into a single
+ # function, instead of applying once to descriptor then to attention.
def _backprop_loss(tape, loss, weights):
"""Backpropogate losses using clipped gradients.
@@ -340,12 +356,25 @@ def main(argv):
with tf.summary.record_if(
tf.math.equal(0, optimizer.iterations % report_interval)):
+ # TODO(dananghel): try to load pretrained weights at backbone creation.
+ # Load pretrained weights for ResNet50 trained on ImageNet.
+ if FLAGS.imagenet_checkpoint is not None:
+ logging.info('Attempting to load ImageNet pretrained weights.')
+ input_batch = next(train_iter)
+ _, _ = distributed_train_step(input_batch)
+ model.backbone.restore_weights(FLAGS.imagenet_checkpoint)
+ logging.info('Done.')
+ else:
+ logging.info('Skip loading ImageNet pretrained weights.')
+ if FLAGS.debug:
+ model.backbone.log_weights()
+
global_step_value = optimizer.iterations.numpy()
while global_step_value < max_iters:
# input_batch : images(b, h, w, c), labels(b,).
try:
- input_batch = train_iterator.get_next()
+ input_batch = next(train_iter)
except tf.errors.OutOfRangeError:
# Break if we run out of data in the dataset.
logging.info('Stopping training at global step %d, no more data',
@@ -388,9 +417,9 @@ def main(argv):
# Validate once in {eval_interval*n, n \in N} steps.
if global_step_value % eval_interval == 0:
- for i in range(num_eval):
+ for i in range(num_eval_batches):
try:
- validation_batch = validation_iterator.get_next()
+ validation_batch = next(validation_iter)
desc_validation_result, attn_validation_result = (
distributed_validation_step(validation_batch))
except tf.errors.OutOfRangeError:
@@ -412,13 +441,17 @@ def main(argv):
print(' : attn:', attn_validation_result.numpy())
# Save checkpoint once (each save_interval*n, n \in N) steps.
+ # TODO(andrearaujo): save only in one of the two ways. They are
+ # identical, the only difference is that the manager adds some extra
+ # prefixes and variables (eg, optimizer variables).
if global_step_value % save_interval == 0:
save_path = manager.save()
- logging.info('Saved({global_step_value}) at %s', save_path)
+ logging.info('Saved (%d) at %s', global_step_value, save_path)
file_path = '%s/delf_weights' % FLAGS.logdir
model.save_weights(file_path, save_format='tf')
- logging.info('Saved weights({global_step_value}) at %s', file_path)
+ logging.info('Saved weights (%d) at %s', global_step_value,
+ file_path)
# Reset metrics for next step.
desc_train_accuracy.reset_states()
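
The training-loop hunks above swap the removed `strategy.make_dataset_iterator(...)` / `.initialize()` API for `strategy.experimental_distribute_dataset(...)` driven with plain `iter()` / `next()`. A standalone toy sketch of that TF2 pattern follows; the dataset, model, and step function are stand-ins, and `strategy.run` assumes TF 2.2 or later.

```python
# Toy sketch of the distributed input pattern adopted above.
import tensorflow as tf

strategy = tf.distribute.MirroredStrategy()
global_batch_size = 8

dataset = tf.data.Dataset.from_tensor_slices(
    (tf.random.uniform([64, 4]),
     tf.random.uniform([64], maxval=3, dtype=tf.int32))).batch(global_batch_size)
dist_dataset = strategy.experimental_distribute_dataset(dataset)

with strategy.scope():
  dense = tf.keras.layers.Dense(3)
  dense.build((None, 4))  # create variables in cross-replica context

def train_step(batch):
  images, labels = batch
  logits = dense(images)
  return tf.reduce_mean(
      tf.keras.losses.sparse_categorical_crossentropy(
          labels, logits, from_logits=True))

@tf.function
def distributed_step(batch):
  per_replica_loss = strategy.run(train_step, args=(batch,))
  return strategy.reduce(tf.distribute.ReduceOp.MEAN, per_replica_loss, axis=None)

train_iter = iter(dist_dataset)  # replaces make_dataset_iterator()/initialize()
for _ in range(3):
  print(float(distributed_step(next(train_iter))))
```
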
diff --git a/research/efficient-hrl/agent.py b/research/efficient-hrl/agent.py
index cb02b51fa9eb9d98c83ca863d4cfe8a9c90008ce..0028ddffa0d37a0e80d2c990e6263a3d9b4ab948 100644
--- a/research/efficient-hrl/agent.py
+++ b/research/efficient-hrl/agent.py
@@ -149,7 +149,7 @@ class UvfAgentCore(object):
error = tf.square(actions - pred_actions)
spec_range = (self._action_spec.maximum - self._action_spec.minimum) / 2
- normalized_error = error / tf.constant(spec_range) ** 2
+ normalized_error = tf.cast(error, tf.float64) / tf.constant(spec_range) ** 2
return -normalized_error
@gin.configurable('uvf_add_noise_fn')
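
The one-line agent.py change casts `error` to float64 before dividing by `tf.constant(spec_range) ** 2`: the action-spec range is typically a NumPy float64 array while `error` is float32, and TensorFlow does not implicitly promote dtypes in binary ops. A toy illustration of the mismatch and the fix (values are made up):

```python
# Toy illustration: dividing a float32 tensor by a float64 tensor raises an
# error (TypeError or InvalidArgumentError depending on TF version), so one
# operand must be cast explicitly, as done in the hunk above.
import numpy as np
import tensorflow as tf

error = tf.square(tf.constant([0.5, -1.0], dtype=tf.float32))
spec_range = np.array([2.0, 2.0])  # float64, like an action-spec range

try:
  _ = error / tf.constant(spec_range) ** 2
except (TypeError, tf.errors.InvalidArgumentError) as e:
  print('dtype mismatch:', type(e).__name__)

normalized_error = tf.cast(error, tf.float64) / tf.constant(spec_range) ** 2
print(normalized_error.numpy())
```
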
diff --git a/research/neural_programmer/README.md b/research/neural_programmer/README.md
index 6101a85b9651fb7ad3de4f66af722c384c95d69a..dcc27f6fb015ec625935a0ea37d814a2ba10d2e3 100644
--- a/research/neural_programmer/README.md
+++ b/research/neural_programmer/README.md
@@ -4,20 +4,23 @@
# Neural Programmer
-Implementation of the Neural Programmer model described in [paper](https://openreview.net/pdf?id=ry2YOrcge)
+Implementation of the Neural Programmer model as described in this [paper](https://openreview.net/pdf?id=ry2YOrcge).
-Download and extract the data from [dropbox](https://www.dropbox.com/s/9tvtcv6lmy51zfw/data.zip?dl=0). Change the ``data_dir FLAG`` to the location of the data.
+Download and extract the data from the [WikiTableQuestions](https://ppasupat.github.io/WikiTableQuestions/) site. The dataset contains
+11321, 2831, and 4344 examples for training, development, and testing respectively. We use their tokenization, number and date pre-processing. Please note that the above paper used the [initial release](https://github.com/ppasupat/WikiTableQuestions/releases/tag/v0.2) for training, development and testing.
+
+Change the `data_dir FLAG` to the location of the data.
### Training
-``python neural_programmer.py``
+Run `python neural_programmer.py`
-The models are written to FLAGS.output_dir
+The models are written to `FLAGS.output_dir`.
### Testing
-``python neural_programmer.py --evaluator_job=True``
+Run `python neural_programmer.py --evaluator_job=True`
-The models are loaded from ``FLAGS.output_dir``. The evaluation is done on development data.
+The models are loaded from `FLAGS.output_dir`. The evaluation is done on development data.
-In case of errors because of encoding, add ``"# -*- coding: utf-8 -*-"`` as the first line in ``wiki_data.py``
+In case of errors because of encoding, add `"# -*- coding: utf-8 -*-"` as the first line in `wiki_data.py`
Maintained by Arvind Neelakantan (arvind2505)
diff --git a/research/object_detection/README.md b/research/object_detection/README.md
index b6dc9ad0ae97caa359b90ef5108de43c6ce71734..c88e88c4703754cf0a59088f8b57f0e29687e4ef 100644
--- a/research/object_detection/README.md
+++ b/research/object_detection/README.md
@@ -2,17 +2,16 @@

# Tensorflow Object Detection API
+
Creating accurate machine learning models capable of localizing and identifying
multiple objects in a single image remains a core challenge in computer vision.
The TensorFlow Object Detection API is an open source framework built on top of
TensorFlow that makes it easy to construct, train and deploy object detection
-models. At Google we’ve certainly found this codebase to be useful for our
-computer vision needs, and we hope that you will as well.
-
-
-
+models. At Google we’ve certainly found this codebase to be useful for our
+computer vision needs, and we hope that you will as well.
+
Contributions to the codebase are welcome and we would love to hear back from
-you if you find this API useful. Finally if you use the Tensorflow Object
+you if you find this API useful. Finally if you use the Tensorflow Object
Detection API for a research publication, please consider citing:
```
@@ -20,8 +19,8 @@ Detection API for a research publication, please consider citing:
Huang J, Rathod V, Sun C, Zhu M, Korattikara A, Fathi A, Fischer I, Wojna Z,
Song Y, Guadarrama S, Murphy K, CVPR 2017
```
-\[[link](https://arxiv.org/abs/1611.10012)\]\[[bibtex](
-https://scholar.googleusercontent.com/scholar.bib?q=info:l291WsrB-hQJ:scholar.google.com/&output=citation&scisig=AAGBfm0AAAAAWUIIlnPZ_L9jxvPwcC49kDlELtaeIyU-&scisf=4&ct=citation&cd=-1&hl=en&scfhb=1)\]
+
+\[[link](https://arxiv.org/abs/1611.10012)\]\[[bibtex](https://scholar.googleusercontent.com/scholar.bib?q=info:l291WsrB-hQJ:scholar.google.com/&output=citation&scisig=AAGBfm0AAAAAWUIIlnPZ_L9jxvPwcC49kDlELtaeIyU-&scisf=4&ct=citation&cd=-1&hl=en&scfhb=1)\]
@@ -29,63 +28,65 @@ https://scholar.googleusercontent.com/scholar.bib?q=info:l291WsrB-hQJ:scholar.go
## Maintainers
-| Name | GitHub |
-| --- | --- |
-| Jonathan Huang | [jch1](https://github.com/jch1) |
-| Vivek Rathod | [tombstone](https://github.com/tombstone) |
-| Ronny Votel | [ronnyvotel](https://github.com/ronnyvotel) |
-| Derek Chow | [derekjchow](https://github.com/derekjchow) |
-| Chen Sun | [jesu9](https://github.com/jesu9) |
-| Menglong Zhu | [dreamdragon](https://github.com/dreamdragon) |
-| Alireza Fathi | [afathi3](https://github.com/afathi3) |
-| Zhichao Lu | [pkulzc](https://github.com/pkulzc) |
+Name | GitHub
+-------------- | ---------------------------------------------
+Jonathan Huang | [jch1](https://github.com/jch1)
+Vivek Rathod | [tombstone](https://github.com/tombstone)
+Ronny Votel | [ronnyvotel](https://github.com/ronnyvotel)
+Derek Chow | [derekjchow](https://github.com/derekjchow)
+Chen Sun | [jesu9](https://github.com/jesu9)
+Menglong Zhu | [dreamdragon](https://github.com/dreamdragon)
+Alireza Fathi | [afathi3](https://github.com/afathi3)
+Zhichao Lu | [pkulzc](https://github.com/pkulzc)
## Table of contents
Setup:
- * Installation
+* Installation
Quick Start:
- *
+*
Quick Start: Jupyter notebook for off-the-shelf inference
- * Quick Start: Training a pet detector
+* Quick Start: Training a pet detector
Customizing a Pipeline:
- *
+*
Configuring an object detection pipeline
- * Preparing inputs
+* Preparing inputs
Running:
- * Running locally
- * Running on the cloud
+* Running locally
+* Running on the cloud
Extras:
- * Tensorflow detection model zoo
- *
+* Tensorflow detection model zoo
+*
Exporting a trained model for inference
- *
+*
Exporting a trained model for TPU inference
- *
+*
Defining your own model architecture
- *
+*
Bringing in your own dataset
- *
+*
Supported object detection evaluation protocols
- *
+*
Inference and evaluation on the Open Images dataset
- *
+*
Run an instance segmentation model
- *
+*
Run the evaluation for the Open Images Challenge 2018/2019
- *
+*
TPU compatible detection pipelines
- *
+*
Running object detection on mobile devices with TensorFlow Lite
+*
+ Context R-CNN documentation for data preparation, training, and export
## Getting Help
@@ -98,78 +99,107 @@ tensorflow/models GitHub
[issue tracker](https://github.com/tensorflow/models/issues), prefixing the
issue name with "object_detection".
-Please check [FAQ](g3doc/faq.md) for frequently asked questions before
-reporting an issue.
-
+Please check [FAQ](g3doc/faq.md) for frequently asked questions before reporting
+an issue.
## Release information
+### June 17th, 2020
+
+We have released [Context R-CNN](https://arxiv.org/abs/1912.03538), a model that
+uses attention to incorporate contextual information from other images (e.g.
+temporally nearby frames taken by a static camera) in order to improve accuracy.
+Importantly, these contextual images need not be labeled.
+
+* When applied to a challenging wildlife detection dataset ([Snapshot Serengeti](http://lila.science/datasets/snapshot-serengeti)),
+ Context R-CNN with context from up to a month of images outperforms a
+ single-frame baseline by 17.9% mAP, and outperforms S3D (a 3d convolution
+ based baseline) by 11.2% mAP.
+* Context R-CNN leverages temporal context from the unlabeled frames of a
+ novel camera deployment to improve performance at that camera, boosting
+    model generalizability.
+
+Read about Context R-CNN on the Google AI blog [here](https://ai.googleblog.com/2020/06/leveraging-temporal-context-for-object.html).
+
+We have provided code for generating data with associated context
+[here](g3doc/context_rcnn.md), and a sample config for a Context R-CNN
+model [here](samples/configs/context_rcnn_resnet101_snapshot_serengeti_sync.config).
+
+Snapshot Serengeti-trained Faster R-CNN and Context R-CNN models can be found in
+the [model zoo](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/detection_model_zoo.md#snapshot-serengeti-camera-trap-trained-models).
+
+A colab demonstrating Context R-CNN is provided
+[here](colab_tutorials/context_rcnn_tutorial.ipynb).
+
+Thanks to contributors: Sara Beery, Jonathan Huang, Guanhang Wu, Vivek
+Rathod, Ronny Votel, Zhichao Lu, David Ross, Pietro Perona, Tanya Birch, and
+the Wildlife Insights AI Team.
### May 19th, 2020
-We have released
-[MobileDets](https://arxiv.org/abs/2004.14525),
-a set of high-performance models for mobile CPUs, DSPs and EdgeTPUs.
-* MobileDets outperform MobileNetV3+SSDLite by 1.7 mAP at comparable mobile CPU
-inference latencies. MobileDets also outperform MobileNetV2+SSDLite by 1.9 mAP
-on mobile CPUs, 3.7 mAP on EdgeTPUs and 3.4 mAP on DSPs while running equally
-fast. MobileDets also offer up to 2x speedup over MnasFPN on EdgeTPUs and DSPs.
+We have released [MobileDets](https://arxiv.org/abs/2004.14525), a set of
+high-performance models for mobile CPUs, DSPs and EdgeTPUs.
+
+* MobileDets outperform MobileNetV3+SSDLite by 1.7 mAP at comparable mobile
+ CPU inference latencies. MobileDets also outperform MobileNetV2+SSDLite by
+ 1.9 mAP on mobile CPUs, 3.7 mAP on EdgeTPUs and 3.4 mAP on DSPs while
+ running equally fast. MobileDets also offer up to 2x speedup over MnasFPN on
+ EdgeTPUs and DSPs.
For each of the three hardware platforms we have released model definition,
model checkpoints trained on the COCO14 dataset and converted TFLite models in
fp32 and/or uint8.
-Thanks to contributors: Yunyang Xiong, Hanxiao Liu, Suyog Gupta,
-Berkin Akin, Gabriel Bender, Pieter-Jan Kindermans, Mingxing Tan, Vikas Singh,
-Bo Chen, Quoc Le, Zhichao Lu.
-
+Thanks to contributors: Yunyang Xiong, Hanxiao Liu, Suyog Gupta, Berkin
+Akin, Gabriel Bender, Pieter-Jan Kindermans, Mingxing Tan, Vikas Singh, Bo Chen,
+Quoc Le, Zhichao Lu.
### May 7th, 2020
+
We have released a mobile model with the
[MnasFPN head](https://arxiv.org/abs/1912.01106).
+* MnasFPN with MobileNet-V2 backbone is the most accurate (26.6 mAP at 183ms
+ on Pixel 1) mobile detection model we have released to date. With
+ depth-multiplier, MnasFPN with MobileNet-V2 backbone is 1.8 mAP higher than
+ MobileNet-V3-Large with SSDLite (23.8 mAP vs 22.0 mAP) at similar latency
+ (120ms) on Pixel 1.
-* MnasFPN with MobileNet-V2 backbone is the most accurate (26.6 mAP at 183ms on
-Pixel 1) mobile detection model we have released to date. With depth-multiplier,
-MnasFPN with MobileNet-V2 backbone is 1.8 mAP higher than MobileNet-V3-Large
-with SSDLite (23.8 mAP vs 22.0 mAP) at similar latency (120ms) on Pixel 1.
-
-We have released model definition, model checkpoints trained on
-the COCO14 dataset and a converted TFLite model.
-
-Thanks to contributors: Bo Chen, Golnaz Ghiasi, Hanxiao Liu,
-Tsung-Yi Lin, Dmitry Kalenichenko, Hartwig Adam, Quoc Le, Zhichao Lu,
-Jonathan Huang, Hao Xu.
-
+We have released model definition, model checkpoints trained on the COCO14
+dataset and a converted TFLite model.
+Thanks to contributors: Bo Chen, Golnaz Ghiasi, Hanxiao Liu, Tsung-Yi
+Lin, Dmitry Kalenichenko, Hartwig Adam, Quoc Le, Zhichao Lu, Jonathan Huang, Hao
+Xu.
### Nov 13th, 2019
+
We have released MobileNetEdgeTPU SSDLite model.
-* SSDLite with MobileNetEdgeTPU backbone, which achieves 10% mAP higher than
-MobileNetV2 SSDLite (24.3 mAP vs 22 mAP) on a Google Pixel4 at comparable
-latency (6.6ms vs 6.8ms).
+* SSDLite with MobileNetEdgeTPU backbone, which achieves 10% mAP higher than
+ MobileNetV2 SSDLite (24.3 mAP vs 22 mAP) on a Google Pixel4 at comparable
+ latency (6.6ms vs 6.8ms).
-Along with the model definition, we are also releasing model checkpoints
-trained on the COCO dataset.
+Along with the model definition, we are also releasing model checkpoints trained
+on the COCO dataset.
Thanks to contributors: Yunyang Xiong, Bo Chen, Suyog Gupta, Hanxiao Liu,
Gabriel Bender, Mingxing Tan, Berkin Akin, Zhichao Lu, Quoc Le
### Oct 15th, 2019
+
We have released two MobileNet V3 SSDLite models (presented in
[Searching for MobileNetV3](https://arxiv.org/abs/1905.02244)).
-* SSDLite with MobileNet-V3-Large backbone, which is 27% faster than Mobilenet
-V2 SSDLite (119ms vs 162ms) on a Google Pixel phone CPU at the same mAP.
-* SSDLite with MobileNet-V3-Small backbone, which is 37% faster than MnasNet
-SSDLite reduced with depth-multiplier (43ms vs 68ms) at the same mAP.
+* SSDLite with MobileNet-V3-Large backbone, which is 27% faster than Mobilenet
+ V2 SSDLite (119ms vs 162ms) on a Google Pixel phone CPU at the same mAP.
+* SSDLite with MobileNet-V3-Small backbone, which is 37% faster than MnasNet
+ SSDLite reduced with depth-multiplier (43ms vs 68ms) at the same mAP.
-Along with the model definition, we are also releasing model checkpoints
-trained on the COCO dataset.
+Along with the model definition, we are also releasing model checkpoints trained
+on the COCO dataset.
Thanks to contributors: Bo Chen, Zhichao Lu, Vivek Rathod, Jonathan Huang
-
### July 1st, 2019
We have released an updated set of utils and an updated
@@ -177,28 +207,30 @@ We have released an updated set of utils and an updated
[Open Images Challenge 2019](https://storage.googleapis.com/openimages/web/challenge2019.html)!
The Instance Segmentation metric for
-[Open Images V5](https://storage.googleapis.com/openimages/web/index.html)
-and [Challenge 2019](https://storage.googleapis.com/openimages/web/challenge2019.html)
-is part of this release. Check out [the metric description](https://storage.googleapis.com/openimages/web/evaluation.html#instance_segmentation_eval)
+[Open Images V5](https://storage.googleapis.com/openimages/web/index.html) and
+[Challenge 2019](https://storage.googleapis.com/openimages/web/challenge2019.html)
+is part of this release. Check out
+[the metric description](https://storage.googleapis.com/openimages/web/evaluation.html#instance_segmentation_eval)
on the Open Images website.
Thanks to contributors: Alina Kuznetsova, Rodrigo Benenson
### Feb 11, 2019
-We have released detection models trained on the Open Images Dataset V4
-in our detection model zoo, including
+We have released detection models trained on the Open Images Dataset V4 in our
+detection model zoo, including
-* Faster R-CNN detector with Inception Resnet V2 feature extractor
-* SSD detector with MobileNet V2 feature extractor
-* SSD detector with ResNet 101 FPN feature extractor (aka RetinaNet-101)
+* Faster R-CNN detector with Inception Resnet V2 feature extractor
+* SSD detector with MobileNet V2 feature extractor
+* SSD detector with ResNet 101 FPN feature extractor (aka RetinaNet-101)
Thanks to contributors: Alina Kuznetsova, Yinxiao Li
### Sep 17, 2018
We have released Faster R-CNN detectors with ResNet-50 / ResNet-101 feature
-extractors trained on the [iNaturalist Species Detection Dataset](https://github.com/visipedia/inat_comp/blob/master/2017/README.md#bounding-boxes).
+extractors trained on the
+[iNaturalist Species Detection Dataset](https://github.com/visipedia/inat_comp/blob/master/2017/README.md#bounding-boxes).
The models are trained on the training split of the iNaturalist data for 4M
iterations, they achieve 55% and 58% mean AP@.5 over 2854 classes respectively.
For more details please refer to this [paper](https://arxiv.org/abs/1707.06642).
@@ -210,42 +242,59 @@ For more details please refer to this [paper](https://arxiv.org/abs/1707.06642).
There are many new updates in this release, extending the functionality and
capability of the API:
-* Moving from slim-based training to [Estimator](https://www.tensorflow.org/api_docs/python/tf/estimator/Estimator)-based
-training.
-* Support for [RetinaNet](https://arxiv.org/abs/1708.02002), and a [MobileNet](https://ai.googleblog.com/2017/06/mobilenets-open-source-models-for.html)
-adaptation of RetinaNet.
-* A novel SSD-based architecture called the [Pooling Pyramid Network](https://arxiv.org/abs/1807.03284) (PPN).
-* Releasing several [TPU](https://cloud.google.com/tpu/)-compatible models.
-These can be found in the `samples/configs/` directory with a comment in the
-pipeline configuration files indicating TPU compatibility.
-* Support for quantized training.
-* Updated documentation for new binaries, Cloud training, and [Tensorflow Lite](https://www.tensorflow.org/mobile/tflite/).
-
-See also our [expanded announcement blogpost](https://ai.googleblog.com/2018/07/accelerated-training-and-inference-with.html) and accompanying tutorial at the [TensorFlow blog](https://medium.com/tensorflow/training-and-serving-a-realtime-mobile-object-detector-in-30-minutes-with-cloud-tpus-b78971cf1193).
+* Moving from slim-based training to
+ [Estimator](https://www.tensorflow.org/api_docs/python/tf/estimator/Estimator)-based
+ training.
+* Support for [RetinaNet](https://arxiv.org/abs/1708.02002), and a
+ [MobileNet](https://ai.googleblog.com/2017/06/mobilenets-open-source-models-for.html)
+ adaptation of RetinaNet.
+* A novel SSD-based architecture called the
+ [Pooling Pyramid Network](https://arxiv.org/abs/1807.03284) (PPN).
+* Releasing several [TPU](https://cloud.google.com/tpu/)-compatible models.
+ These can be found in the `samples/configs/` directory with a comment in the
+ pipeline configuration files indicating TPU compatibility.
+* Support for quantized training.
+* Updated documentation for new binaries, Cloud training, and
+ [Tensorflow Lite](https://www.tensorflow.org/mobile/tflite/).
+
+See also our
+[expanded announcement blogpost](https://ai.googleblog.com/2018/07/accelerated-training-and-inference-with.html)
+and accompanying tutorial at the
+[TensorFlow blog](https://medium.com/tensorflow/training-and-serving-a-realtime-mobile-object-detector-in-30-minutes-with-cloud-tpus-b78971cf1193).
Thanks to contributors: Sara Robinson, Aakanksha Chowdhery, Derek Chow,
Pengchong Jin, Jonathan Huang, Vivek Rathod, Zhichao Lu, Ronny Votel
-
### June 25, 2018
-Additional evaluation tools for the [Open Images Challenge 2018](https://storage.googleapis.com/openimages/web/challenge.html) are out.
-Check out our short tutorial on data preparation and running evaluation [here](g3doc/challenge_evaluation.md)!
+Additional evaluation tools for the
+[Open Images Challenge 2018](https://storage.googleapis.com/openimages/web/challenge.html)
+are out. Check out our short tutorial on data preparation and running evaluation
+[here](g3doc/challenge_evaluation.md)!
Thanks to contributors: Alina Kuznetsova
### June 5, 2018
-We have released the implementation of evaluation metrics for both tracks of the [Open Images Challenge 2018](https://storage.googleapis.com/openimages/web/challenge.html) as a part of the Object Detection API - see the [evaluation protocols](g3doc/evaluation_protocols.md) for more details.
-Additionally, we have released a tool for hierarchical labels expansion for the Open Images Challenge: check out [oid_hierarchical_labels_expansion.py](dataset_tools/oid_hierarchical_labels_expansion.py).
+We have released the implementation of evaluation metrics for both tracks of the
+[Open Images Challenge 2018](https://storage.googleapis.com/openimages/web/challenge.html)
+as a part of the Object Detection API - see the
+[evaluation protocols](g3doc/evaluation_protocols.md) for more details.
+Additionally, we have released a tool for hierarchical labels expansion for the
+Open Images Challenge: check out
+[oid_hierarchical_labels_expansion.py](dataset_tools/oid_hierarchical_labels_expansion.py).
-Thanks to contributors: Alina Kuznetsova, Vittorio Ferrari, Jasper Uijlings
+Thanks to contributors: Alina Kuznetsova, Vittorio Ferrari, Jasper
+Uijlings
### April 30, 2018
-We have released a Faster R-CNN detector with ResNet-101 feature extractor trained on [AVA](https://research.google.com/ava/) v2.1.
-Compared with other commonly used object detectors, it changes the action classification loss function to per-class Sigmoid loss to handle boxes with multiple labels.
-The model is trained on the training split of AVA v2.1 for 1.5M iterations, it achieves mean AP of 11.25% over 60 classes on the validation split of AVA v2.1.
+We have released a Faster R-CNN detector with ResNet-101 feature extractor
+trained on [AVA](https://research.google.com/ava/) v2.1. Compared with other
+commonly used object detectors, it changes the action classification loss
+function to per-class Sigmoid loss to handle boxes with multiple labels. The
+model is trained on the training split of AVA v2.1 for 1.5M iterations, it
+achieves mean AP of 11.25% over 60 classes on the validation split of AVA v2.1.
For more details please refer to this [paper](https://arxiv.org/abs/1705.08421).
Thanks to contributors: Chen Sun, David Ross
@@ -255,84 +304,94 @@ For more details please refer to this [paper](https://arxiv.org/abs/1705.08421).
Supercharge your mobile phones with the next generation mobile object detector!
We are adding support for MobileNet V2 with SSDLite presented in
[MobileNetV2: Inverted Residuals and Linear Bottlenecks](https://arxiv.org/abs/1801.04381).
-This model is 35% faster than Mobilenet V1 SSD on a Google Pixel phone CPU (200ms vs. 270ms) at the same accuracy.
-Along with the model definition, we are also releasing a model checkpoint trained on the COCO dataset.
+This model is 35% faster than Mobilenet V1 SSD on a Google Pixel phone CPU
+(200ms vs. 270ms) at the same accuracy. Along with the model definition, we are
+also releasing a model checkpoint trained on the COCO dataset.
-Thanks to contributors: Menglong Zhu, Mark Sandler, Zhichao Lu, Vivek Rathod, Jonathan Huang
+Thanks to contributors: Menglong Zhu, Mark Sandler, Zhichao Lu, Vivek
+Rathod, Jonathan Huang
### February 9, 2018
-We now support instance segmentation!! In this API update we support a number of instance segmentation models similar to those discussed in the [Mask R-CNN paper](https://arxiv.org/abs/1703.06870). For further details refer to
-[our slides](http://presentations.cocodataset.org/Places17-GMRI.pdf) from the 2017 Coco + Places Workshop.
-Refer to the section on [Running an Instance Segmentation Model](g3doc/instance_segmentation.md) for instructions on how to configure a model
-that predicts masks in addition to object bounding boxes.
+We now support instance segmentation!! In this API update we support a number of
+instance segmentation models similar to those discussed in the
+[Mask R-CNN paper](https://arxiv.org/abs/1703.06870). For further details refer
+to [our slides](http://presentations.cocodataset.org/Places17-GMRI.pdf) from the
+2017 Coco + Places Workshop. Refer to the section on
+[Running an Instance Segmentation Model](g3doc/instance_segmentation.md) for
+instructions on how to configure a model that predicts masks in addition to
+object bounding boxes.
-Thanks to contributors: Alireza Fathi, Zhichao Lu, Vivek Rathod, Ronny Votel, Jonathan Huang
+Thanks to contributors: Alireza Fathi, Zhichao Lu, Vivek Rathod, Ronny
+Votel, Jonathan Huang
### November 17, 2017
As a part of the Open Images V3 release we have released:
-* An implementation of the Open Images evaluation metric and the [protocol](g3doc/evaluation_protocols.md#open-images).
-* Additional tools to separate inference of detection and evaluation (see [this tutorial](g3doc/oid_inference_and_evaluation.md)).
-* A new detection model trained on the Open Images V2 data release (see [Open Images model](g3doc/detection_model_zoo.md#open-images-models)).
+* An implementation of the Open Images evaluation metric and the
+ [protocol](g3doc/evaluation_protocols.md#open-images).
+* Additional tools to separate inference of detection and evaluation (see
+ [this tutorial](g3doc/oid_inference_and_evaluation.md)).
+* A new detection model trained on the Open Images V2 data release (see
+ [Open Images model](g3doc/detection_model_zoo.md#open-images-models)).
-See more information on the [Open Images website](https://github.com/openimages/dataset)!
+See more information on the
+[Open Images website](https://github.com/openimages/dataset)!
Thanks to contributors: Stefan Popov, Alina Kuznetsova
### November 6, 2017
We have re-released faster versions of our (pre-trained) models in the
-model zoo. In addition to what
-was available before, we are also adding Faster R-CNN models trained on COCO
-with Inception V2 and Resnet-50 feature extractors, as well as a Faster R-CNN
-with Resnet-101 model trained on the KITTI dataset.
+model zoo. In addition to what was
+available before, we are also adding Faster R-CNN models trained on COCO with
+Inception V2 and Resnet-50 feature extractors, as well as a Faster R-CNN with
+Resnet-101 model trained on the KITTI dataset.
-Thanks to contributors: Jonathan Huang, Vivek Rathod, Derek Chow,
-Tal Remez, Chen Sun.
+Thanks to contributors: Jonathan Huang, Vivek Rathod, Derek Chow, Tal
+Remez, Chen Sun.
### October 31, 2017
-We have released a new state-of-the-art model for object detection using
-the Faster-RCNN with the
-[NASNet-A image featurization](https://arxiv.org/abs/1707.07012). This
-model achieves mAP of 43.1% on the test-dev validation dataset for COCO,
-improving on the best available model in the zoo by 6% in terms
-of absolute mAP.
+We have released a new state-of-the-art model for object detection using the
+Faster-RCNN with the
+[NASNet-A image featurization](https://arxiv.org/abs/1707.07012). This model
+achieves mAP of 43.1% on the test-dev validation dataset for COCO, improving on
+the best available model in the zoo by 6% in terms of absolute mAP.
-Thanks to contributors: Barret Zoph, Vijay Vasudevan, Jonathon Shlens, Quoc Le
+Thanks to contributors: Barret Zoph, Vijay Vasudevan, Jonathon Shlens,
+Quoc Le
### August 11, 2017
-We have released an update to the [Android Detect
-demo](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/examples/android)
-which will now run models trained using the Tensorflow Object
-Detection API on an Android device. By default, it currently runs a
-frozen SSD w/Mobilenet detector trained on COCO, but we encourage
-you to try out other detection models!
+We have released an update to the
+[Android Detect demo](https://github.com/tensorflow/tensorflow/tree/master/tensorflow/examples/android)
+which will now run models trained using the Tensorflow Object Detection API on
+an Android device. By default, it currently runs a frozen SSD w/Mobilenet
+detector trained on COCO, but we encourage you to try out other detection
+models!
Thanks to contributors: Jonathan Huang, Andrew Harp
-
### June 15, 2017
-In addition to our base Tensorflow detection model definitions, this
-release includes:
-
-* A selection of trainable detection models, including:
- * Single Shot Multibox Detector (SSD) with MobileNet,
- * SSD with Inception V2,
- * Region-Based Fully Convolutional Networks (R-FCN) with Resnet 101,
- * Faster RCNN with Resnet 101,
- * Faster RCNN with Inception Resnet v2
-* Frozen weights (trained on the COCO dataset) for each of the above models to
- be used for out-of-the-box inference purposes.
-* A [Jupyter notebook](object_detection_tutorial.ipynb) for performing
- out-of-the-box inference with one of our released models
-* Convenient [local training](g3doc/running_locally.md) scripts as well as
- distributed training and evaluation pipelines via
- [Google Cloud](g3doc/running_on_cloud.md).
+In addition to our base Tensorflow detection model definitions, this release
+includes:
+
+* A selection of trainable detection models, including:
+ * Single Shot Multibox Detector (SSD) with MobileNet,
+ * SSD with Inception V2,
+ * Region-Based Fully Convolutional Networks (R-FCN) with Resnet 101,
+ * Faster RCNN with Resnet 101,
+ * Faster RCNN with Inception Resnet v2
+* Frozen weights (trained on the COCO dataset) for each of the above models to
+ be used for out-of-the-box inference purposes.
+* A [Jupyter notebook](colab_tutorials/object_detection_tutorial.ipynb) for
+ performing out-of-the-box inference with one of our released models
+* Convenient [local training](g3doc/running_locally.md) scripts as well as
+ distributed training and evaluation pipelines via
+ [Google Cloud](g3doc/running_on_cloud.md).
Thanks to contributors: Jonathan Huang, Vivek Rathod, Derek Chow, Chen
Sun, Menglong Zhu, Matthew Tang, Anoop Korattikara, Alireza Fathi, Ian Fischer,
diff --git a/research/object_detection/builders/box_predictor_builder_test.py b/research/object_detection/builders/box_predictor_builder_test.py
index 72a71b794c3572d12932ef16868d9793687945e4..7154cd2efc06e2c4581e654d718e3519152bc6bb 100644
--- a/research/object_detection/builders/box_predictor_builder_test.py
+++ b/research/object_detection/builders/box_predictor_builder_test.py
@@ -16,6 +16,7 @@
"""Tests for box_predictor_builder."""
+import unittest
import mock
import tensorflow.compat.v1 as tf
@@ -25,8 +26,10 @@ from object_detection.builders import hyperparams_builder
from object_detection.predictors import mask_rcnn_box_predictor
from object_detection.protos import box_predictor_pb2
from object_detection.protos import hyperparams_pb2
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only Tests.')
class ConvolutionalBoxPredictorBuilderTest(tf.test.TestCase):
def test_box_predictor_calls_conv_argscope_fn(self):
@@ -161,6 +164,7 @@ class ConvolutionalBoxPredictorBuilderTest(tf.test.TestCase):
self.assertFalse(class_head._use_depthwise)
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only Tests.')
class WeightSharedConvolutionalBoxPredictorBuilderTest(tf.test.TestCase):
def test_box_predictor_calls_conv_argscope_fn(self):
@@ -357,6 +361,7 @@ class WeightSharedConvolutionalBoxPredictorBuilderTest(tf.test.TestCase):
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only Tests.')
class MaskRCNNBoxPredictorBuilderTest(tf.test.TestCase):
def test_box_predictor_builder_calls_fc_argscope_fn(self):
@@ -537,6 +542,7 @@ class MaskRCNNBoxPredictorBuilderTest(tf.test.TestCase):
._convolve_then_upsample)
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only Tests.')
class RfcnBoxPredictorBuilderTest(tf.test.TestCase):
def test_box_predictor_calls_fc_argscope_fn(self):
diff --git a/research/object_detection/builders/calibration_builder_test.py b/research/object_detection/builders/calibration_builder_test.py
index a077ef4f92765c4599f16f432223504a2bda577c..a81d53a86e65bc400fe38cac8c96867aa1489607 100644
--- a/research/object_detection/builders/calibration_builder_test.py
+++ b/research/object_detection/builders/calibration_builder_test.py
@@ -25,31 +25,34 @@ from six.moves import zip
import tensorflow.compat.v1 as tf
from object_detection.builders import calibration_builder
from object_detection.protos import calibration_pb2
+from object_detection.utils import test_case
-class CalibrationBuilderTest(tf.test.TestCase):
+class CalibrationBuilderTest(test_case.TestCase):
def test_tf_linear_interp1d_map(self):
"""Tests TF linear interpolation mapping to a single number."""
- with self.test_session() as sess:
+ def graph_fn():
tf_x = tf.constant([0., 0.5, 1.])
tf_y = tf.constant([0.5, 0.5, 0.5])
new_x = tf.constant([0., 0.25, 0.5, 0.75, 1.])
tf_map_outputs = calibration_builder._tf_linear_interp1d(
new_x, tf_x, tf_y)
- tf_map_outputs_np = sess.run([tf_map_outputs])
- self.assertAllClose(tf_map_outputs_np, [[0.5, 0.5, 0.5, 0.5, 0.5]])
+ return tf_map_outputs
+ tf_map_outputs_np = self.execute(graph_fn, [])
+ self.assertAllClose(tf_map_outputs_np, [0.5, 0.5, 0.5, 0.5, 0.5])
def test_tf_linear_interp1d_interpolate(self):
"""Tests TF 1d linear interpolation not mapping to a single number."""
- with self.test_session() as sess:
+ def graph_fn():
tf_x = tf.constant([0., 0.5, 1.])
tf_y = tf.constant([0.6, 0.7, 1.0])
new_x = tf.constant([0., 0.25, 0.5, 0.75, 1.])
tf_interpolate_outputs = calibration_builder._tf_linear_interp1d(
new_x, tf_x, tf_y)
- tf_interpolate_outputs_np = sess.run([tf_interpolate_outputs])
- self.assertAllClose(tf_interpolate_outputs_np, [[0.6, 0.65, 0.7, 0.85, 1.]])
+ return tf_interpolate_outputs
+ tf_interpolate_outputs_np = self.execute(graph_fn, [])
+ self.assertAllClose(tf_interpolate_outputs_np, [0.6, 0.65, 0.7, 0.85, 1.])
@staticmethod
def _get_scipy_interp1d(new_x, x, y):
@@ -59,12 +62,13 @@ class CalibrationBuilderTest(tf.test.TestCase):
def _get_tf_interp1d(self, new_x, x, y):
"""Helper performing 1d linear interpolation using Tensorflow."""
- with self.test_session() as sess:
+ def graph_fn():
tf_interp_outputs = calibration_builder._tf_linear_interp1d(
tf.convert_to_tensor(new_x, dtype=tf.float32),
tf.convert_to_tensor(x, dtype=tf.float32),
tf.convert_to_tensor(y, dtype=tf.float32))
- np_tf_interp_outputs = sess.run(tf_interp_outputs)
+ return tf_interp_outputs
+ np_tf_interp_outputs = self.execute(graph_fn, [])
return np_tf_interp_outputs
def test_tf_linear_interp1d_against_scipy_map(self):
@@ -128,8 +132,7 @@ class CalibrationBuilderTest(tf.test.TestCase):
self._add_function_approximation_to_calibration_proto(
calibration_config, class_agnostic_x, class_agnostic_y, class_id=None)
- od_graph = tf.Graph()
- with self.test_session(graph=od_graph) as sess:
+ def graph_fn():
calibration_fn = calibration_builder.build(calibration_config)
# batch_size = 2, num_classes = 2, num_anchors = 2.
class_predictions_with_background = tf.constant(
@@ -140,7 +143,8 @@ class CalibrationBuilderTest(tf.test.TestCase):
# Everything should map to 0.5 if classes are ignored.
calibrated_scores = calibration_fn(class_predictions_with_background)
- calibrated_scores_np = sess.run(calibrated_scores)
+ return calibrated_scores
+ calibrated_scores_np = self.execute(graph_fn, [])
self.assertAllClose(calibrated_scores_np, [[[0.05, 0.1, 0.15],
[0.2, 0.25, 0.0]],
[[0.35, 0.45, 0.55],
@@ -161,8 +165,7 @@ class CalibrationBuilderTest(tf.test.TestCase):
self._add_function_approximation_to_calibration_proto(
calibration_config, class_1_x, class_1_y, class_id=1)
- od_graph = tf.Graph()
- with self.test_session(graph=od_graph) as sess:
+ def graph_fn():
calibration_fn = calibration_builder.build(calibration_config)
# batch_size = 2, num_classes = 2, num_anchors = 2.
class_predictions_with_background = tf.constant(
@@ -170,7 +173,8 @@ class CalibrationBuilderTest(tf.test.TestCase):
[[0.6, 0.4], [0.08, 0.92]]],
dtype=tf.float32)
calibrated_scores = calibration_fn(class_predictions_with_background)
- calibrated_scores_np = sess.run(calibrated_scores)
+ return calibrated_scores
+ calibrated_scores_np = self.execute(graph_fn, [])
self.assertAllClose(calibrated_scores_np, [[[0.5, 0.6], [0.5, 0.3]],
[[0.5, 0.7], [0.5, 0.96]]])
@@ -179,8 +183,7 @@ class CalibrationBuilderTest(tf.test.TestCase):
calibration_config = calibration_pb2.CalibrationConfig()
calibration_config.temperature_scaling_calibration.scaler = 2.0
- od_graph = tf.Graph()
- with self.test_session(graph=od_graph) as sess:
+ def graph_fn():
calibration_fn = calibration_builder.build(calibration_config)
# batch_size = 2, num_classes = 2, num_anchors = 2.
class_predictions_with_background = tf.constant(
@@ -188,7 +191,8 @@ class CalibrationBuilderTest(tf.test.TestCase):
[[0.6, 0.7, 0.8], [0.9, 1.0, 1.0]]],
dtype=tf.float32)
calibrated_scores = calibration_fn(class_predictions_with_background)
- calibrated_scores_np = sess.run(calibrated_scores)
+ return calibrated_scores
+ calibrated_scores_np = self.execute(graph_fn, [])
self.assertAllClose(calibrated_scores_np,
[[[0.05, 0.1, 0.15], [0.2, 0.25, 0.0]],
[[0.3, 0.35, 0.4], [0.45, 0.5, 0.5]]])
@@ -212,8 +216,7 @@ class CalibrationBuilderTest(tf.test.TestCase):
calibration_config = calibration_pb2.CalibrationConfig()
self._add_function_approximation_to_calibration_proto(
calibration_config, class_0_x, class_0_y, class_id=0)
- od_graph = tf.Graph()
- with self.test_session(graph=od_graph) as sess:
+ def graph_fn():
calibration_fn = calibration_builder.build(calibration_config)
# batch_size = 2, num_classes = 2, num_anchors = 2.
class_predictions_with_background = tf.constant(
@@ -221,7 +224,8 @@ class CalibrationBuilderTest(tf.test.TestCase):
[[0.6, 0.4], [0.08, 0.92]]],
dtype=tf.float32)
calibrated_scores = calibration_fn(class_predictions_with_background)
- calibrated_scores_np = sess.run(calibrated_scores)
+ return calibrated_scores
+ calibrated_scores_np = self.execute(graph_fn, [])
self.assertAllClose(calibrated_scores_np, [[[0.5, 0.2], [0.5, 0.1]],
[[0.5, 0.4], [0.5, 0.92]]])
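
The calibration_builder_test changes convert TF1 `self.test_session()` blocks to the `test_case.TestCase` style, where each test defines a `graph_fn` and evaluates it with `self.execute(graph_fn, inputs)` so the same body runs under graph or eager execution. A hedged sketch of a test written in that style (the test class and computation below are illustrative, not part of the patch):

```python
# Sketch of the graph_fn / self.execute style used above. `test_case` is
# object_detection.utils.test_case; the computation here is illustrative.
import numpy as np
import tensorflow.compat.v1 as tf

from object_detection.utils import test_case


class ExampleStyleTest(test_case.TestCase):

  def test_clip_values(self):
    def graph_fn(values):
      # Any TF computation; self.execute runs it in graph or eager mode.
      return tf.clip_by_value(values, 0.0, 1.0)

    outputs = self.execute(graph_fn,
                           [np.array([-0.5, 0.25, 1.5], dtype=np.float32)])
    self.assertAllClose(outputs, [0.0, 0.25, 1.0])


if __name__ == '__main__':
  tf.test.main()
```
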
diff --git a/research/object_detection/builders/dataset_builder.py b/research/object_detection/builders/dataset_builder.py
index 772086619a921335fd671a232a9917f51e7f58ce..c1c1ce3ecd17c2625585cd83f080b49c0150151a 100644
--- a/research/object_detection/builders/dataset_builder.py
+++ b/research/object_detection/builders/dataset_builder.py
@@ -29,7 +29,6 @@ from __future__ import print_function
import functools
import tensorflow.compat.v1 as tf
-from tensorflow.contrib import data as tf_data
from object_detection.builders import decoder_builder
from object_detection.protos import input_reader_pb2
@@ -94,7 +93,7 @@ def read_dataset(file_read_func, input_files, config,
filename_dataset = filename_dataset.repeat(config.num_epochs or None)
records_dataset = filename_dataset.apply(
- tf_data.parallel_interleave(
+ tf.data.experimental.parallel_interleave(
file_read_func,
cycle_length=num_readers,
block_length=config.read_block_length,
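This hunk only swaps the contrib alias for the tf.data.experimental one; the call signature is unchanged. In newer TF releases parallel_interleave is itself deprecated, and roughly the same behaviour is available from Dataset.interleave. A hedged, self-contained sketch, where the file names, cycle_length and block_length are placeholders standing in for file_read_func, num_readers and config.read_block_length:

import tensorflow as tf

# Roughly equivalent non-deprecated form of the parallel_interleave call above.
filenames = tf.data.Dataset.from_tensor_slices(['a.tfrecord', 'b.tfrecord'])  # placeholder paths
records = filenames.interleave(
    tf.data.TFRecordDataset,   # stands in for file_read_func
    cycle_length=2,            # stands in for num_readers
    block_length=8,            # stands in for config.read_block_length
    num_parallel_calls=tf.data.experimental.AUTOTUNE)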
@@ -153,6 +152,30 @@ def build(input_reader_config, batch_size=None, transform_input_data_fn=None,
if not config.input_path:
raise ValueError('At least one input path must be specified in '
'`input_reader_config`.')
+ def dataset_map_fn(dataset, fn_to_map, batch_size=None,
+ input_reader_config=None):
+ """Handles whether or not to use the legacy map function.
+
+ Args:
+ dataset: A tf.Dataset.
+ fn_to_map: The function to be mapped for that dataset.
+ batch_size: Batch size. If batch size is None, no batching is performed.
+      input_reader_config: An input_reader_pb2.InputReader object.
+
+ Returns:
+ A tf.data.Dataset mapped with fn_to_map.
+ """
+ if hasattr(dataset, 'map_with_legacy_function'):
+ if batch_size:
+ num_parallel_calls = batch_size * (
+ input_reader_config.num_parallel_batches)
+ else:
+ num_parallel_calls = input_reader_config.num_parallel_map_calls
+ dataset = dataset.map_with_legacy_function(
+ fn_to_map, num_parallel_calls=num_parallel_calls)
+ else:
+ dataset = dataset.map(fn_to_map, tf.data.experimental.AUTOTUNE)
+ return dataset
shard_fn = shard_function_for_context(input_context)
if input_context is not None:
batch_size = input_context.get_per_replica_batch_size(batch_size)
@@ -163,15 +186,16 @@ def build(input_reader_config, batch_size=None, transform_input_data_fn=None,
dataset = dataset.shard(input_reader_config.sample_1_of_n_examples, 0)
# TODO(rathodv): make batch size a required argument once the old binaries
# are deleted.
- dataset = dataset.map(decoder.decode, tf.data.experimental.AUTOTUNE)
+ dataset = dataset_map_fn(dataset, decoder.decode, batch_size,
+ input_reader_config)
if reduce_to_frame_fn:
- dataset = reduce_to_frame_fn(dataset)
+ dataset = reduce_to_frame_fn(dataset, dataset_map_fn, batch_size,
+ input_reader_config)
if transform_input_data_fn is not None:
- dataset = dataset.map(transform_input_data_fn,
- tf.data.experimental.AUTOTUNE)
+ dataset = dataset_map_fn(dataset, transform_input_data_fn,
+ batch_size, input_reader_config)
if batch_size:
- dataset = dataset.apply(
- tf_data.batch_and_drop_remainder(batch_size))
+ dataset = dataset.batch(batch_size, drop_remainder=True)
dataset = dataset.prefetch(input_reader_config.num_prefetch_batches)
return dataset
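Taken together, this file's changes replace both contrib entry points: dataset_map_fn prefers map_with_legacy_function when the (TF1) Dataset exposes it and otherwise falls back to Dataset.map with AUTOTUNE, and contrib's batch_and_drop_remainder becomes the built-in batch(..., drop_remainder=True). A minimal, self-contained sketch of the same flow; map_then_batch and its arguments are illustrative, not part of the real builder:

import tensorflow as tf

def map_then_batch(dataset, fn_to_map, batch_size, num_parallel_calls=4):
  # Prefer the legacy map function when the Dataset API provides it (TF1).
  if hasattr(dataset, 'map_with_legacy_function'):
    dataset = dataset.map_with_legacy_function(
        fn_to_map, num_parallel_calls=num_parallel_calls)
  else:
    dataset = dataset.map(fn_to_map, tf.data.experimental.AUTOTUNE)
  # Built-in replacement for contrib's batch_and_drop_remainder.
  return dataset.batch(batch_size, drop_remainder=True)

ds = map_then_batch(tf.data.Dataset.range(10), lambda x: x * 2, batch_size=3)
# Yields [0 2 4], [6 8 10], [12 14 16]; the leftover element is dropped.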
diff --git a/research/object_detection/builders/dataset_builder_test.py b/research/object_detection/builders/dataset_builder_test.py
index 741ff3bcf84b5dae608e1e79241921f7e23ee3d1..eb2cdb3ccbd891e5f089281d9b506d636d26d6a9 100644
--- a/research/object_detection/builders/dataset_builder_test.py
+++ b/research/object_detection/builders/dataset_builder_test.py
@@ -197,13 +197,13 @@ class DatasetBuilderTest(test_case.TestCase):
output_dict[fields.InputDataFields.groundtruth_boxes][0][0])
def get_mock_reduce_to_frame_fn(self):
- def mock_reduce_to_frame_fn(dataset):
+ def mock_reduce_to_frame_fn(dataset, dataset_map_fn, batch_size, config):
def get_frame(tensor_dict):
out_tensor_dict = {}
out_tensor_dict[fields.InputDataFields.source_id] = (
tensor_dict[fields.InputDataFields.source_id][0])
return out_tensor_dict
- return dataset.map(get_frame, tf.data.experimental.AUTOTUNE)
+ return dataset_map_fn(dataset, get_frame, batch_size, config)
return mock_reduce_to_frame_fn
def test_build_tf_record_input_reader_sequence_example_train(self):
@@ -390,7 +390,7 @@ class DatasetBuilderTest(test_case.TestCase):
return iter1.get_next(), iter2.get_next()
output_dict1, output_dict2 = self.execute(graph_fn, [])
- self.assertAllEqual(['0'], output_dict1[fields.InputDataFields.source_id])
+ self.assertAllEqual([b'0'], output_dict1[fields.InputDataFields.source_id])
self.assertEqual([b'1'], output_dict2[fields.InputDataFields.source_id])
def test_sample_one_of_n_shards(self):
@@ -537,8 +537,15 @@ class ReadDatasetTest(test_case.TestCase):
def graph_fn():
keys = [1, 0, -1]
dataset = tf.data.Dataset.from_tensor_slices([[1, 2, -1, 5]])
- table = contrib_lookup.HashTable(
- initializer=contrib_lookup.KeyValueTensorInitializer(
+ try:
+        # Dynamically try to load the TF v2 lookup, falling back to contrib.
+ lookup = tf.compat.v2.lookup
+ hash_table_class = tf.compat.v2.lookup.StaticHashTable
+ except AttributeError:
+ lookup = contrib_lookup
+ hash_table_class = contrib_lookup.HashTable
+ table = hash_table_class(
+ initializer=lookup.KeyValueTensorInitializer(
keys=keys, values=list(reversed(keys))),
default_value=100)
dataset = dataset.map(table.lookup)
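The lookup-table test now resolves the hash-table class at runtime: tf.compat.v2.lookup.StaticHashTable where it exists, contrib otherwise. A self-contained sketch of the TF2 branch with the same keys, reversed values, and default of 100 for misses:

import tensorflow as tf

keys = [1, 0, -1]
table = tf.lookup.StaticHashTable(
    initializer=tf.lookup.KeyValueTensorInitializer(
        keys=keys, values=list(reversed(keys))),
    default_value=100)
dataset = tf.data.Dataset.from_tensor_slices([[1, 2, -1, 5]]).map(table.lookup)
print(next(iter(dataset)).numpy())  # [ -1 100   1 100]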
@@ -559,7 +566,7 @@ class ReadDatasetTest(test_case.TestCase):
data = self.execute(graph_fn, [])
# Note that the execute function extracts single outputs if the return
# value is of size 1.
- self.assertAllEqual(
+ self.assertCountEqual(
data, [
1, 10, 2, 20, 3, 30, 4, 40, 5, 50, 1, 10, 2, 20, 3, 30, 4, 40, 5,
50
@@ -577,7 +584,7 @@ class ReadDatasetTest(test_case.TestCase):
data = self.execute(graph_fn, [])
# Note that the execute function extracts single outputs if the return
# value is of size 1.
- self.assertAllEqual(
+ self.assertCountEqual(
data, [
1, 10, 2, 20, 3, 30, 4, 40, 5, 50, 1, 10, 2, 20, 3, 30, 4, 40, 5,
50
@@ -607,12 +614,14 @@ class ReadDatasetTest(test_case.TestCase):
def graph_fn():
return self._get_dataset_next(
[self._shuffle_path_template % '*'], config, batch_size=10)
- expected_non_shuffle_output = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
+ expected_non_shuffle_output1 = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
+ expected_non_shuffle_output2 = [1, 1, 1, 1, 1, 0, 0, 0, 0, 0]
# Note that the execute function extracts single outputs if the return
# value is of size 1.
data = self.execute(graph_fn, [])
- self.assertAllEqual(data, expected_non_shuffle_output)
+ self.assertTrue(all(data == expected_non_shuffle_output1) or
+ all(data == expected_non_shuffle_output2))
def test_read_dataset_single_epoch(self):
config = input_reader_pb2.InputReader()
diff --git a/research/object_detection/builders/decoder_builder.py b/research/object_detection/builders/decoder_builder.py
index d3cac57d0813c11451b7f92197136d9ea8844af2..59880735cd3fd6be3d4e9c567af615227d0a1fb1 100644
--- a/research/object_detection/builders/decoder_builder.py
+++ b/research/object_detection/builders/decoder_builder.py
@@ -48,7 +48,7 @@ def build(input_reader_config):
if input_reader_config.HasField('label_map_path'):
label_map_proto_file = input_reader_config.label_map_path
input_type = input_reader_config.input_type
- if input_type == input_reader_pb2.InputType.TF_EXAMPLE:
+ if input_type == input_reader_pb2.InputType.Value('TF_EXAMPLE'):
decoder = tf_example_decoder.TfExampleDecoder(
load_instance_masks=input_reader_config.load_instance_masks,
load_multiclass_scores=input_reader_config.load_multiclass_scores,
@@ -60,7 +60,7 @@ def build(input_reader_config):
num_keypoints=input_reader_config.num_keypoints,
expand_hierarchy_labels=input_reader_config.expand_labels_hierarchy)
return decoder
- elif input_type == input_reader_pb2.InputType.TF_SEQUENCE_EXAMPLE:
+ elif input_type == input_reader_pb2.InputType.Value('TF_SEQUENCE_EXAMPLE'):
decoder = tf_sequence_example_decoder.TfSequenceExampleDecoder(
label_map_proto_file=label_map_proto_file,
load_context_features=input_reader_config.load_context_features)
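Both decoder branches switch from attribute-style enum access to InputType.Value('...'); Value() is part of protobuf's EnumTypeWrapper API, so the comparison behaves the same regardless of which generated-code variant the installed protobuf provides. A small sketch of the pattern, assuming the object_detection protos are importable:

from object_detection.protos import input_reader_pb2

input_type = input_reader_pb2.InputType.Value('TF_EXAMPLE')
# Name() is the inverse lookup on the same enum wrapper.
assert input_reader_pb2.InputType.Name(input_type) == 'TF_EXAMPLE'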
diff --git a/research/object_detection/builders/decoder_builder_test.py b/research/object_detection/builders/decoder_builder_test.py
index 767c108e94ae7840f9d3175e360dd2bcaa99dbbf..d45285fd19f7648ab4d9365b155ba35a2ce0d3ed 100644
--- a/research/object_detection/builders/decoder_builder_test.py
+++ b/research/object_detection/builders/decoder_builder_test.py
@@ -29,6 +29,7 @@ from object_detection.core import standard_fields as fields
from object_detection.dataset_tools import seq_example_util
from object_detection.protos import input_reader_pb2
from object_detection.utils import dataset_util
+from object_detection.utils import test_case
def _get_labelmap_path():
@@ -38,17 +39,20 @@ def _get_labelmap_path():
'pet_label_map.pbtxt')
-class DecoderBuilderTest(tf.test.TestCase):
+class DecoderBuilderTest(test_case.TestCase):
def _make_serialized_tf_example(self, has_additional_channels=False):
- image_tensor = np.random.randint(255, size=(4, 5, 3)).astype(np.uint8)
- additional_channels_tensor = np.random.randint(
+ image_tensor_np = np.random.randint(255, size=(4, 5, 3)).astype(np.uint8)
+ additional_channels_tensor_np = np.random.randint(
255, size=(4, 5, 1)).astype(np.uint8)
flat_mask = (4 * 5) * [1.0]
- with self.test_session():
- encoded_jpeg = tf.image.encode_jpeg(tf.constant(image_tensor)).eval()
- encoded_additional_channels_jpeg = tf.image.encode_jpeg(
- tf.constant(additional_channels_tensor)).eval()
+ def graph_fn(image_tensor):
+ encoded_jpeg = tf.image.encode_jpeg(image_tensor)
+ return encoded_jpeg
+ encoded_jpeg = self.execute_cpu(graph_fn, [image_tensor_np])
+ encoded_additional_channels_jpeg = self.execute_cpu(
+ graph_fn, [additional_channels_tensor_np])
+
features = {
'image/source_id': dataset_util.bytes_feature('0'.encode()),
'image/encoded': dataset_util.bytes_feature(encoded_jpeg),
@@ -71,46 +75,45 @@ class DecoderBuilderTest(tf.test.TestCase):
def _make_random_serialized_jpeg_images(self, num_frames, image_height,
image_width):
- images = tf.cast(tf.random.uniform(
- [num_frames, image_height, image_width, 3],
- maxval=256,
- dtype=tf.int32), dtype=tf.uint8)
- images_list = tf.unstack(images, axis=0)
- encoded_images_list = [tf.io.encode_jpeg(image) for image in images_list]
- with tf.Session() as sess:
- encoded_images = sess.run(encoded_images_list)
- return encoded_images
+ def graph_fn():
+ images = tf.cast(tf.random.uniform(
+ [num_frames, image_height, image_width, 3],
+ maxval=256,
+ dtype=tf.int32), dtype=tf.uint8)
+ images_list = tf.unstack(images, axis=0)
+ encoded_images = [tf.io.encode_jpeg(image) for image in images_list]
+ return encoded_images
+ return self.execute_cpu(graph_fn, [])
def _make_serialized_tf_sequence_example(self):
num_frames = 4
image_height = 20
image_width = 30
image_source_ids = [str(i) for i in range(num_frames)]
- with self.test_session():
- encoded_images = self._make_random_serialized_jpeg_images(
- num_frames, image_height, image_width)
- sequence_example_serialized = seq_example_util.make_sequence_example(
- dataset_name='video_dataset',
- video_id='video',
- encoded_images=encoded_images,
- image_height=image_height,
- image_width=image_width,
- image_source_ids=image_source_ids,
- image_format='JPEG',
- is_annotated=[[1], [1], [1], [1]],
- bboxes=[
- [[]], # Frame 0.
- [[0., 0., 1., 1.]], # Frame 1.
- [[0., 0., 1., 1.],
- [0.1, 0.1, 0.2, 0.2]], # Frame 2.
- [[]], # Frame 3.
- ],
- label_strings=[
- [], # Frame 0.
- ['Abyssinian'], # Frame 1.
- ['Abyssinian', 'american_bulldog'], # Frame 2.
- [], # Frame 3
- ]).SerializeToString()
+ encoded_images = self._make_random_serialized_jpeg_images(
+ num_frames, image_height, image_width)
+ sequence_example_serialized = seq_example_util.make_sequence_example(
+ dataset_name='video_dataset',
+ video_id='video',
+ encoded_images=encoded_images,
+ image_height=image_height,
+ image_width=image_width,
+ image_source_ids=image_source_ids,
+ image_format='JPEG',
+ is_annotated=[[1], [1], [1], [1]],
+ bboxes=[
+ [[]], # Frame 0.
+ [[0., 0., 1., 1.]], # Frame 1.
+ [[0., 0., 1., 1.],
+ [0.1, 0.1, 0.2, 0.2]], # Frame 2.
+ [[]], # Frame 3.
+ ],
+ label_strings=[
+ [], # Frame 0.
+ ['Abyssinian'], # Frame 1.
+ ['Abyssinian', 'american_bulldog'], # Frame 2.
+ [], # Frame 3
+ ]).SerializeToString()
return sequence_example_serialized
def test_build_tf_record_input_reader(self):
@@ -119,21 +122,19 @@ class DecoderBuilderTest(tf.test.TestCase):
text_format.Parse(input_reader_text_proto, input_reader_proto)
decoder = decoder_builder.build(input_reader_proto)
- tensor_dict = decoder.decode(self._make_serialized_tf_example())
-
- with tf.train.MonitoredSession() as sess:
- output_dict = sess.run(tensor_dict)
-
- self.assertNotIn(
- fields.InputDataFields.groundtruth_instance_masks, output_dict)
- self.assertEqual((4, 5, 3), output_dict[fields.InputDataFields.image].shape)
- self.assertAllEqual([2],
- output_dict[fields.InputDataFields.groundtruth_classes])
- self.assertEqual(
- (1, 4), output_dict[fields.InputDataFields.groundtruth_boxes].shape)
- self.assertAllEqual(
- [0.0, 0.0, 1.0, 1.0],
- output_dict[fields.InputDataFields.groundtruth_boxes][0])
+    serialized_example = self._make_serialized_tf_example()
+    def graph_fn():
+      tensor_dict = decoder.decode(serialized_example)
+ return (tensor_dict[fields.InputDataFields.image],
+ tensor_dict[fields.InputDataFields.groundtruth_classes],
+ tensor_dict[fields.InputDataFields.groundtruth_boxes])
+
+ (image, groundtruth_classes,
+ groundtruth_boxes) = self.execute_cpu(graph_fn, [])
+ self.assertEqual((4, 5, 3), image.shape)
+ self.assertAllEqual([2], groundtruth_classes)
+ self.assertEqual((1, 4), groundtruth_boxes.shape)
+ self.assertAllEqual([0.0, 0.0, 1.0, 1.0], groundtruth_boxes[0])
def test_build_tf_record_input_reader_sequence_example(self):
label_map_path = _get_labelmap_path()
@@ -145,12 +146,16 @@ class DecoderBuilderTest(tf.test.TestCase):
input_reader_proto.label_map_path = label_map_path
text_format.Parse(input_reader_text_proto, input_reader_proto)
- decoder = decoder_builder.build(input_reader_proto)
- tensor_dict = decoder.decode(self._make_serialized_tf_sequence_example())
-
- with tf.train.MonitoredSession() as sess:
- output_dict = sess.run(tensor_dict)
-
+ serialized_seq_example = self._make_serialized_tf_sequence_example()
+ def graph_fn():
+ decoder = decoder_builder.build(input_reader_proto)
+ tensor_dict = decoder.decode(serialized_seq_example)
+ return (tensor_dict[fields.InputDataFields.image],
+ tensor_dict[fields.InputDataFields.groundtruth_classes],
+ tensor_dict[fields.InputDataFields.groundtruth_boxes],
+ tensor_dict[fields.InputDataFields.num_groundtruth_boxes])
+ (actual_image, actual_groundtruth_classes, actual_groundtruth_boxes,
+ actual_num_groundtruth_boxes) = self.execute_cpu(graph_fn, [])
expected_groundtruth_classes = [[-1, -1], [1, -1], [1, 2], [-1, -1]]
expected_groundtruth_boxes = [[[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]],
[[0.0, 0.0, 1.0, 1.0], [0.0, 0.0, 0.0, 0.0]],
@@ -158,19 +163,14 @@ class DecoderBuilderTest(tf.test.TestCase):
[[0.0, 0.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]]]
expected_num_groundtruth_boxes = [0, 1, 2, 0]
- self.assertNotIn(
- fields.InputDataFields.groundtruth_instance_masks, output_dict)
# Sequence example images are encoded.
- self.assertEqual((4,), output_dict[fields.InputDataFields.image].shape)
+ self.assertEqual((4,), actual_image.shape)
self.assertAllEqual(expected_groundtruth_classes,
- output_dict[fields.InputDataFields.groundtruth_classes])
- self.assertEqual(
- (4, 2, 4), output_dict[fields.InputDataFields.groundtruth_boxes].shape)
+ actual_groundtruth_classes)
self.assertAllClose(expected_groundtruth_boxes,
- output_dict[fields.InputDataFields.groundtruth_boxes])
+ actual_groundtruth_boxes)
self.assertAllClose(
- expected_num_groundtruth_boxes,
- output_dict[fields.InputDataFields.num_groundtruth_boxes])
+ expected_num_groundtruth_boxes, actual_num_groundtruth_boxes)
def test_build_tf_record_input_reader_and_load_instance_masks(self):
input_reader_text_proto = """
@@ -181,14 +181,12 @@ class DecoderBuilderTest(tf.test.TestCase):
text_format.Parse(input_reader_text_proto, input_reader_proto)
decoder = decoder_builder.build(input_reader_proto)
- tensor_dict = decoder.decode(self._make_serialized_tf_example())
-
- with tf.train.MonitoredSession() as sess:
- output_dict = sess.run(tensor_dict)
-
- self.assertAllEqual(
- (1, 4, 5),
- output_dict[fields.InputDataFields.groundtruth_instance_masks].shape)
+    serialized_example = self._make_serialized_tf_example()
+    def graph_fn():
+      tensor_dict = decoder.decode(serialized_example)
+ return tensor_dict[fields.InputDataFields.groundtruth_instance_masks]
+ masks = self.execute_cpu(graph_fn, [])
+ self.assertAllEqual((1, 4, 5), masks.shape)
if __name__ == '__main__':
diff --git a/research/object_detection/builders/graph_rewriter_builder_test.py b/research/object_detection/builders/graph_rewriter_builder_tf1_test.py
similarity index 91%
rename from research/object_detection/builders/graph_rewriter_builder_test.py
rename to research/object_detection/builders/graph_rewriter_builder_tf1_test.py
index 02692ce91089e1f8f6a01fa27267d51ec0c516d1..8af8fe9627bf3041b0499909cc298d1790810753 100644
--- a/research/object_detection/builders/graph_rewriter_builder_test.py
+++ b/research/object_detection/builders/graph_rewriter_builder_tf1_test.py
@@ -13,22 +13,21 @@
# limitations under the License.
# ==============================================================================
"""Tests for graph_rewriter_builder."""
+import unittest
import mock
import tensorflow.compat.v1 as tf
import tf_slim as slim
from object_detection.builders import graph_rewriter_builder
from object_detection.protos import graph_rewriter_pb2
+from object_detection.utils import tf_version
-# pylint: disable=g-import-not-at-top
-try:
- from tensorflow.contrib import quantize as contrib_quantize
-except ImportError:
- # TF 2.0 doesn't ship with contrib.
- pass
-# pylint: enable=g-import-not-at-top
+if tf_version.is_tf1():
+ from tensorflow.contrib import quantize as contrib_quantize # pylint: disable=g-import-not-at-top
+
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class QuantizationBuilderTest(tf.test.TestCase):
def testQuantizationBuilderSetsUpCorrectTrainArguments(self):
diff --git a/research/object_detection/builders/hyperparams_builder.py b/research/object_detection/builders/hyperparams_builder.py
index f34e1112a81bd9dad1c30ba39af6b1a20a252d2c..90aef43ac1bd92fb86dbd730cdb0420858572c18 100644
--- a/research/object_detection/builders/hyperparams_builder.py
+++ b/research/object_detection/builders/hyperparams_builder.py
@@ -64,6 +64,7 @@ class KerasLayerHyperparams(object):
self._batch_norm_params = _build_keras_batch_norm_params(
hyperparams_config.batch_norm)
+ self._force_use_bias = hyperparams_config.force_use_bias
self._activation_fn = _build_activation_fn(hyperparams_config.activation)
# TODO(kaftan): Unclear if these kwargs apply to separable & depthwise conv
# (Those might use depthwise_* instead of kernel_*)
@@ -80,6 +81,13 @@ class KerasLayerHyperparams(object):
def use_batch_norm(self):
return self._batch_norm_params is not None
+ def force_use_bias(self):
+ return self._force_use_bias
+
+ def use_bias(self):
+ return (self._force_use_bias or not
+ (self.use_batch_norm() and self.batch_norm_params()['center']))
+
def batch_norm_params(self, **overrides):
"""Returns a dict containing batchnorm layer construction hyperparameters.
@@ -168,10 +176,7 @@ class KerasLayerHyperparams(object):
new_params['activation'] = None
if include_activation:
new_params['activation'] = self._activation_fn
- if self.use_batch_norm() and self.batch_norm_params()['center']:
- new_params['use_bias'] = False
- else:
- new_params['use_bias'] = True
+ new_params['use_bias'] = self.use_bias()
new_params.update(**overrides)
return new_params
@@ -210,6 +215,10 @@ def build(hyperparams_config, is_training):
raise ValueError('hyperparams_config not of type '
'hyperparams_pb.Hyperparams.')
+ if hyperparams_config.force_use_bias:
+ raise ValueError('Hyperparams force_use_bias only supported by '
+ 'KerasLayerHyperparams.')
+
normalizer_fn = None
batch_norm_params = None
if hyperparams_config.HasField('batch_norm'):
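The new force_use_bias field (assumed here to be added to hyperparams.proto in the same change, since only the builder side appears in this diff) lets a config keep the layer bias even when batch norm with center: true would normally absorb it, while the slim build() path rejects it outright. A hedged usage sketch of the Keras path:

from google.protobuf import text_format
from object_detection.builders import hyperparams_builder
from object_detection.protos import hyperparams_pb2

proto = hyperparams_pb2.Hyperparams()
text_format.Merge("""
  regularizer { l2_regularizer { } }
  initializer { truncated_normal_initializer { } }
  batch_norm { center: true }
  force_use_bias: true
""", proto)
keras_config = hyperparams_builder.KerasLayerHyperparams(proto)
# Batch-norm centering would normally drop the bias, but force_use_bias wins.
assert keras_config.params()['use_bias']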
diff --git a/research/object_detection/builders/hyperparams_builder_test.py b/research/object_detection/builders/hyperparams_builder_test.py
index 0f92f7d75343f8423293be0239651820d433fcb1..e48ac23bcb547c9729038b901a9612d3712d69cb 100644
--- a/research/object_detection/builders/hyperparams_builder_test.py
+++ b/research/object_detection/builders/hyperparams_builder_test.py
@@ -16,6 +16,7 @@
"""Tests object_detection.core.hyperparams_builder."""
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
import tf_slim as slim
@@ -24,12 +25,14 @@ from google.protobuf import text_format
from object_detection.builders import hyperparams_builder
from object_detection.core import freezable_batch_norm
from object_detection.protos import hyperparams_pb2
+from object_detection.utils import tf_version
def _get_scope_key(op):
return getattr(op, '_key_op', str(op))
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only tests.')
class HyperparamsBuilderTest(tf.test.TestCase):
def test_default_arg_scope_has_conv2d_op(self):
@@ -149,29 +152,6 @@ class HyperparamsBuilderTest(tf.test.TestCase):
result = sess.run(regularizer(tf.constant(weights)))
self.assertAllClose(np.abs(weights).sum() * 0.5, result)
- def test_return_l1_regularized_weights_keras(self):
- conv_hyperparams_text_proto = """
- regularizer {
- l1_regularizer {
- weight: 0.5
- }
- }
- initializer {
- truncated_normal_initializer {
- }
- }
- """
- conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
- text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
- keras_config = hyperparams_builder.KerasLayerHyperparams(
- conv_hyperparams_proto)
-
- regularizer = keras_config.params()['kernel_regularizer']
- weights = np.array([1., -1, 4., 2.])
- with self.test_session() as sess:
- result = sess.run(regularizer(tf.constant(weights)))
- self.assertAllClose(np.abs(weights).sum() * 0.5, result)
-
def test_return_l2_regularizer_weights(self):
conv_hyperparams_text_proto = """
regularizer {
@@ -197,30 +177,39 @@ class HyperparamsBuilderTest(tf.test.TestCase):
result = sess.run(regularizer(tf.constant(weights)))
self.assertAllClose(np.power(weights, 2).sum() / 2.0 * 0.42, result)
- def test_return_l2_regularizer_weights_keras(self):
+ def test_return_non_default_batch_norm_params_with_train_during_train(self):
conv_hyperparams_text_proto = """
regularizer {
l2_regularizer {
- weight: 0.42
}
}
initializer {
truncated_normal_initializer {
}
}
+ batch_norm {
+ decay: 0.7
+ center: false
+ scale: true
+ epsilon: 0.03
+ train: true
+ }
"""
conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
- keras_config = hyperparams_builder.KerasLayerHyperparams(
- conv_hyperparams_proto)
-
- regularizer = keras_config.params()['kernel_regularizer']
- weights = np.array([1., -1, 4., 2.])
- with self.test_session() as sess:
- result = sess.run(regularizer(tf.constant(weights)))
- self.assertAllClose(np.power(weights, 2).sum() / 2.0 * 0.42, result)
+ scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
+ is_training=True)
+ scope = scope_fn()
+ conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
+ self.assertEqual(conv_scope_arguments['normalizer_fn'], slim.batch_norm)
+ batch_norm_params = scope[_get_scope_key(slim.batch_norm)]
+ self.assertAlmostEqual(batch_norm_params['decay'], 0.7)
+ self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03)
+ self.assertFalse(batch_norm_params['center'])
+ self.assertTrue(batch_norm_params['scale'])
+ self.assertTrue(batch_norm_params['is_training'])
- def test_return_non_default_batch_norm_params_with_train_during_train(self):
+ def test_return_batch_norm_params_with_notrain_during_eval(self):
conv_hyperparams_text_proto = """
regularizer {
l2_regularizer {
@@ -241,7 +230,7 @@ class HyperparamsBuilderTest(tf.test.TestCase):
conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
- is_training=True)
+ is_training=False)
scope = scope_fn()
conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
self.assertEqual(conv_scope_arguments['normalizer_fn'], slim.batch_norm)
@@ -250,10 +239,9 @@ class HyperparamsBuilderTest(tf.test.TestCase):
self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03)
self.assertFalse(batch_norm_params['center'])
self.assertTrue(batch_norm_params['scale'])
- self.assertTrue(batch_norm_params['is_training'])
+ self.assertFalse(batch_norm_params['is_training'])
- def test_return_non_default_batch_norm_params_keras(
- self):
+ def test_return_batch_norm_params_with_notrain_when_train_is_false(self):
conv_hyperparams_text_proto = """
regularizer {
l2_regularizer {
@@ -268,26 +256,43 @@ class HyperparamsBuilderTest(tf.test.TestCase):
center: false
scale: true
epsilon: 0.03
+ train: false
}
"""
conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
- keras_config = hyperparams_builder.KerasLayerHyperparams(
- conv_hyperparams_proto)
-
- self.assertTrue(keras_config.use_batch_norm())
- batch_norm_params = keras_config.batch_norm_params()
- self.assertAlmostEqual(batch_norm_params['momentum'], 0.7)
+ scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
+ is_training=True)
+ scope = scope_fn()
+ conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
+ self.assertEqual(conv_scope_arguments['normalizer_fn'], slim.batch_norm)
+ batch_norm_params = scope[_get_scope_key(slim.batch_norm)]
+ self.assertAlmostEqual(batch_norm_params['decay'], 0.7)
self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03)
self.assertFalse(batch_norm_params['center'])
self.assertTrue(batch_norm_params['scale'])
+ self.assertFalse(batch_norm_params['is_training'])
- batch_norm_layer = keras_config.build_batch_norm()
- self.assertIsInstance(batch_norm_layer,
- freezable_batch_norm.FreezableBatchNorm)
+ def test_do_not_use_batch_norm_if_default(self):
+ conv_hyperparams_text_proto = """
+ regularizer {
+ l2_regularizer {
+ }
+ }
+ initializer {
+ truncated_normal_initializer {
+ }
+ }
+ """
+ conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
+ text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
+ scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
+ is_training=True)
+ scope = scope_fn()
+ conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
+ self.assertEqual(conv_scope_arguments['normalizer_fn'], None)
- def test_return_non_default_batch_norm_params_keras_override(
- self):
+ def test_use_none_activation(self):
conv_hyperparams_text_proto = """
regularizer {
l2_regularizer {
@@ -297,26 +302,57 @@ class HyperparamsBuilderTest(tf.test.TestCase):
truncated_normal_initializer {
}
}
- batch_norm {
- decay: 0.7
- center: false
- scale: true
- epsilon: 0.03
+ activation: NONE
+ """
+ conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
+ text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
+ scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
+ is_training=True)
+ scope = scope_fn()
+ conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
+ self.assertEqual(conv_scope_arguments['activation_fn'], None)
+
+ def test_use_relu_activation(self):
+ conv_hyperparams_text_proto = """
+ regularizer {
+ l2_regularizer {
+ }
+ }
+ initializer {
+ truncated_normal_initializer {
+ }
}
+ activation: RELU
"""
conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
- keras_config = hyperparams_builder.KerasLayerHyperparams(
- conv_hyperparams_proto)
+ scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
+ is_training=True)
+ scope = scope_fn()
+ conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
+ self.assertEqual(conv_scope_arguments['activation_fn'], tf.nn.relu)
- self.assertTrue(keras_config.use_batch_norm())
- batch_norm_params = keras_config.batch_norm_params(momentum=0.4)
- self.assertAlmostEqual(batch_norm_params['momentum'], 0.4)
- self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03)
- self.assertFalse(batch_norm_params['center'])
- self.assertTrue(batch_norm_params['scale'])
+ def test_use_relu_6_activation(self):
+ conv_hyperparams_text_proto = """
+ regularizer {
+ l2_regularizer {
+ }
+ }
+ initializer {
+ truncated_normal_initializer {
+ }
+ }
+ activation: RELU_6
+ """
+ conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
+ text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
+ scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
+ is_training=True)
+ scope = scope_fn()
+ conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
+ self.assertEqual(conv_scope_arguments['activation_fn'], tf.nn.relu6)
- def test_return_batch_norm_params_with_notrain_during_eval(self):
+ def test_use_swish_activation(self):
conv_hyperparams_text_proto = """
regularizer {
l2_regularizer {
@@ -326,44 +362,89 @@ class HyperparamsBuilderTest(tf.test.TestCase):
truncated_normal_initializer {
}
}
- batch_norm {
- decay: 0.7
- center: false
- scale: true
- epsilon: 0.03
- train: true
+ activation: SWISH
+ """
+ conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
+ text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
+ scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
+ is_training=True)
+ scope = scope_fn()
+ conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
+ self.assertEqual(conv_scope_arguments['activation_fn'], tf.nn.swish)
+
+ def _assert_variance_in_range(self, initializer, shape, variance,
+ tol=1e-2):
+ with tf.Graph().as_default() as g:
+ with self.test_session(graph=g) as sess:
+ var = tf.get_variable(
+ name='test',
+ shape=shape,
+ dtype=tf.float32,
+ initializer=initializer)
+ sess.run(tf.global_variables_initializer())
+ values = sess.run(var)
+ self.assertAllClose(np.var(values), variance, tol, tol)
+
+ def test_variance_in_range_with_variance_scaling_initializer_fan_in(self):
+ conv_hyperparams_text_proto = """
+ regularizer {
+ l2_regularizer {
+ }
+ }
+ initializer {
+ variance_scaling_initializer {
+ factor: 2.0
+ mode: FAN_IN
+ uniform: false
+ }
}
"""
conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
- is_training=False)
+ is_training=True)
scope = scope_fn()
conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
- self.assertEqual(conv_scope_arguments['normalizer_fn'], slim.batch_norm)
- batch_norm_params = scope[_get_scope_key(slim.batch_norm)]
- self.assertAlmostEqual(batch_norm_params['decay'], 0.7)
- self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03)
- self.assertFalse(batch_norm_params['center'])
- self.assertTrue(batch_norm_params['scale'])
- self.assertFalse(batch_norm_params['is_training'])
+ initializer = conv_scope_arguments['weights_initializer']
+ self._assert_variance_in_range(initializer, shape=[100, 40],
+ variance=2. / 100.)
- def test_return_batch_norm_params_with_notrain_when_train_is_false(self):
+ def test_variance_in_range_with_variance_scaling_initializer_fan_out(self):
conv_hyperparams_text_proto = """
regularizer {
l2_regularizer {
}
}
initializer {
- truncated_normal_initializer {
+ variance_scaling_initializer {
+ factor: 2.0
+ mode: FAN_OUT
+ uniform: false
}
}
- batch_norm {
- decay: 0.7
- center: false
- scale: true
- epsilon: 0.03
- train: false
+ """
+ conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
+ text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
+ scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
+ is_training=True)
+ scope = scope_fn()
+ conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
+ initializer = conv_scope_arguments['weights_initializer']
+ self._assert_variance_in_range(initializer, shape=[100, 40],
+ variance=2. / 40.)
+
+ def test_variance_in_range_with_variance_scaling_initializer_fan_avg(self):
+ conv_hyperparams_text_proto = """
+ regularizer {
+ l2_regularizer {
+ }
+ }
+ initializer {
+ variance_scaling_initializer {
+ factor: 2.0
+ mode: FAN_AVG
+ uniform: false
+ }
}
"""
conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
@@ -372,15 +453,35 @@ class HyperparamsBuilderTest(tf.test.TestCase):
is_training=True)
scope = scope_fn()
conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
- self.assertEqual(conv_scope_arguments['normalizer_fn'], slim.batch_norm)
- batch_norm_params = scope[_get_scope_key(slim.batch_norm)]
- self.assertAlmostEqual(batch_norm_params['decay'], 0.7)
- self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03)
- self.assertFalse(batch_norm_params['center'])
- self.assertTrue(batch_norm_params['scale'])
- self.assertFalse(batch_norm_params['is_training'])
+ initializer = conv_scope_arguments['weights_initializer']
+ self._assert_variance_in_range(initializer, shape=[100, 40],
+ variance=4. / (100. + 40.))
- def test_do_not_use_batch_norm_if_default(self):
+ def test_variance_in_range_with_variance_scaling_initializer_uniform(self):
+ conv_hyperparams_text_proto = """
+ regularizer {
+ l2_regularizer {
+ }
+ }
+ initializer {
+ variance_scaling_initializer {
+ factor: 2.0
+ mode: FAN_IN
+ uniform: true
+ }
+ }
+ """
+ conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
+ text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
+ scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
+ is_training=True)
+ scope = scope_fn()
+ conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
+ initializer = conv_scope_arguments['weights_initializer']
+ self._assert_variance_in_range(initializer, shape=[100, 40],
+ variance=2. / 100.)
+
+ def test_variance_in_range_with_truncated_normal_initializer(self):
conv_hyperparams_text_proto = """
regularizer {
l2_regularizer {
@@ -388,6 +489,8 @@ class HyperparamsBuilderTest(tf.test.TestCase):
}
initializer {
truncated_normal_initializer {
+ mean: 0.0
+ stddev: 0.8
}
}
"""
@@ -397,7 +500,149 @@ class HyperparamsBuilderTest(tf.test.TestCase):
is_training=True)
scope = scope_fn()
conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
- self.assertEqual(conv_scope_arguments['normalizer_fn'], None)
+ initializer = conv_scope_arguments['weights_initializer']
+ self._assert_variance_in_range(initializer, shape=[100, 40],
+ variance=0.49, tol=1e-1)
+
+ def test_variance_in_range_with_random_normal_initializer(self):
+ conv_hyperparams_text_proto = """
+ regularizer {
+ l2_regularizer {
+ }
+ }
+ initializer {
+ random_normal_initializer {
+ mean: 0.0
+ stddev: 0.8
+ }
+ }
+ """
+ conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
+ text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
+ scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
+ is_training=True)
+ scope = scope_fn()
+ conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
+ initializer = conv_scope_arguments['weights_initializer']
+ self._assert_variance_in_range(initializer, shape=[100, 40],
+ variance=0.64, tol=1e-1)
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only tests.')
+class KerasHyperparamsBuilderTest(tf.test.TestCase):
+
+ def _assert_variance_in_range(self, initializer, shape, variance,
+ tol=1e-2):
+ var = tf.Variable(initializer(shape=shape, dtype=tf.float32))
+ self.assertAllClose(np.var(var.numpy()), variance, tol, tol)
+
+ def test_return_l1_regularized_weights_keras(self):
+ conv_hyperparams_text_proto = """
+ regularizer {
+ l1_regularizer {
+ weight: 0.5
+ }
+ }
+ initializer {
+ truncated_normal_initializer {
+ }
+ }
+ """
+ conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
+ text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
+ keras_config = hyperparams_builder.KerasLayerHyperparams(
+ conv_hyperparams_proto)
+
+ regularizer = keras_config.params()['kernel_regularizer']
+ weights = np.array([1., -1, 4., 2.])
+ result = regularizer(tf.constant(weights)).numpy()
+ self.assertAllClose(np.abs(weights).sum() * 0.5, result)
+
+ def test_return_l2_regularizer_weights_keras(self):
+ conv_hyperparams_text_proto = """
+ regularizer {
+ l2_regularizer {
+ weight: 0.42
+ }
+ }
+ initializer {
+ truncated_normal_initializer {
+ }
+ }
+ """
+ conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
+ text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
+ keras_config = hyperparams_builder.KerasLayerHyperparams(
+ conv_hyperparams_proto)
+
+ regularizer = keras_config.params()['kernel_regularizer']
+ weights = np.array([1., -1, 4., 2.])
+ result = regularizer(tf.constant(weights)).numpy()
+ self.assertAllClose(np.power(weights, 2).sum() / 2.0 * 0.42, result)
+
+ def test_return_non_default_batch_norm_params_keras(
+ self):
+ conv_hyperparams_text_proto = """
+ regularizer {
+ l2_regularizer {
+ }
+ }
+ initializer {
+ truncated_normal_initializer {
+ }
+ }
+ batch_norm {
+ decay: 0.7
+ center: false
+ scale: true
+ epsilon: 0.03
+ }
+ """
+ conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
+ text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
+ keras_config = hyperparams_builder.KerasLayerHyperparams(
+ conv_hyperparams_proto)
+
+ self.assertTrue(keras_config.use_batch_norm())
+ batch_norm_params = keras_config.batch_norm_params()
+ self.assertAlmostEqual(batch_norm_params['momentum'], 0.7)
+ self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03)
+ self.assertFalse(batch_norm_params['center'])
+ self.assertTrue(batch_norm_params['scale'])
+
+ batch_norm_layer = keras_config.build_batch_norm()
+ self.assertIsInstance(batch_norm_layer,
+ freezable_batch_norm.FreezableBatchNorm)
+
+ def test_return_non_default_batch_norm_params_keras_override(
+ self):
+ conv_hyperparams_text_proto = """
+ regularizer {
+ l2_regularizer {
+ }
+ }
+ initializer {
+ truncated_normal_initializer {
+ }
+ }
+ batch_norm {
+ decay: 0.7
+ center: false
+ scale: true
+ epsilon: 0.03
+ }
+ """
+ conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
+ text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
+ keras_config = hyperparams_builder.KerasLayerHyperparams(
+ conv_hyperparams_proto)
+
+ self.assertTrue(keras_config.use_batch_norm())
+ batch_norm_params = keras_config.batch_norm_params(momentum=0.4)
+ self.assertAlmostEqual(batch_norm_params['momentum'], 0.4)
+ self.assertAlmostEqual(batch_norm_params['epsilon'], 0.03)
+ self.assertFalse(batch_norm_params['center'])
+ self.assertTrue(batch_norm_params['scale'])
def test_do_not_use_batch_norm_if_default_keras(self):
conv_hyperparams_text_proto = """
@@ -422,7 +667,7 @@ class HyperparamsBuilderTest(tf.test.TestCase):
self.assertIsInstance(identity_layer,
tf.keras.layers.Lambda)
- def test_use_none_activation(self):
+ def test_do_not_use_bias_if_batch_norm_center_keras(self):
conv_hyperparams_text_proto = """
regularizer {
l2_regularizer {
@@ -432,17 +677,27 @@ class HyperparamsBuilderTest(tf.test.TestCase):
truncated_normal_initializer {
}
}
- activation: NONE
+ batch_norm {
+ decay: 0.7
+ center: true
+ scale: true
+ epsilon: 0.03
+ train: true
+ }
"""
conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
- scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
- is_training=True)
- scope = scope_fn()
- conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
- self.assertEqual(conv_scope_arguments['activation_fn'], None)
+ keras_config = hyperparams_builder.KerasLayerHyperparams(
+ conv_hyperparams_proto)
- def test_use_none_activation_keras(self):
+ self.assertTrue(keras_config.use_batch_norm())
+ batch_norm_params = keras_config.batch_norm_params()
+ self.assertTrue(batch_norm_params['center'])
+ self.assertTrue(batch_norm_params['scale'])
+ hyperparams = keras_config.params()
+ self.assertFalse(hyperparams['use_bias'])
+
+ def test_force_use_bias_if_batch_norm_center_keras(self):
conv_hyperparams_text_proto = """
regularizer {
l2_regularizer {
@@ -452,20 +707,28 @@ class HyperparamsBuilderTest(tf.test.TestCase):
truncated_normal_initializer {
}
}
- activation: NONE
+ batch_norm {
+ decay: 0.7
+ center: true
+ scale: true
+ epsilon: 0.03
+ train: true
+ }
+ force_use_bias: true
"""
conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
keras_config = hyperparams_builder.KerasLayerHyperparams(
conv_hyperparams_proto)
- self.assertEqual(keras_config.params()['activation'], None)
- self.assertEqual(
- keras_config.params(include_activation=True)['activation'], None)
- activation_layer = keras_config.build_activation_layer()
- self.assertIsInstance(activation_layer, tf.keras.layers.Lambda)
- self.assertEqual(activation_layer.function, tf.identity)
- def test_use_relu_activation(self):
+ self.assertTrue(keras_config.use_batch_norm())
+ batch_norm_params = keras_config.batch_norm_params()
+ self.assertTrue(batch_norm_params['center'])
+ self.assertTrue(batch_norm_params['scale'])
+ hyperparams = keras_config.params()
+ self.assertTrue(hyperparams['use_bias'])
+
+ def test_use_none_activation_keras(self):
conv_hyperparams_text_proto = """
regularizer {
l2_regularizer {
@@ -475,15 +738,18 @@ class HyperparamsBuilderTest(tf.test.TestCase):
truncated_normal_initializer {
}
}
- activation: RELU
+ activation: NONE
"""
conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
- scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
- is_training=True)
- scope = scope_fn()
- conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
- self.assertEqual(conv_scope_arguments['activation_fn'], tf.nn.relu)
+ keras_config = hyperparams_builder.KerasLayerHyperparams(
+ conv_hyperparams_proto)
+ self.assertIsNone(keras_config.params()['activation'])
+ self.assertIsNone(
+ keras_config.params(include_activation=True)['activation'])
+ activation_layer = keras_config.build_activation_layer()
+ self.assertIsInstance(activation_layer, tf.keras.layers.Lambda)
+ self.assertEqual(activation_layer.function, tf.identity)
def test_use_relu_activation_keras(self):
conv_hyperparams_text_proto = """
@@ -501,33 +767,13 @@ class HyperparamsBuilderTest(tf.test.TestCase):
text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
keras_config = hyperparams_builder.KerasLayerHyperparams(
conv_hyperparams_proto)
- self.assertEqual(keras_config.params()['activation'], None)
+ self.assertIsNone(keras_config.params()['activation'])
self.assertEqual(
keras_config.params(include_activation=True)['activation'], tf.nn.relu)
activation_layer = keras_config.build_activation_layer()
self.assertIsInstance(activation_layer, tf.keras.layers.Lambda)
self.assertEqual(activation_layer.function, tf.nn.relu)
- def test_use_relu_6_activation(self):
- conv_hyperparams_text_proto = """
- regularizer {
- l2_regularizer {
- }
- }
- initializer {
- truncated_normal_initializer {
- }
- }
- activation: RELU_6
- """
- conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
- text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
- scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
- is_training=True)
- scope = scope_fn()
- conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
- self.assertEqual(conv_scope_arguments['activation_fn'], tf.nn.relu6)
-
def test_use_relu_6_activation_keras(self):
conv_hyperparams_text_proto = """
regularizer {
@@ -544,33 +790,13 @@ class HyperparamsBuilderTest(tf.test.TestCase):
text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
keras_config = hyperparams_builder.KerasLayerHyperparams(
conv_hyperparams_proto)
- self.assertEqual(keras_config.params()['activation'], None)
+ self.assertIsNone(keras_config.params()['activation'])
self.assertEqual(
keras_config.params(include_activation=True)['activation'], tf.nn.relu6)
activation_layer = keras_config.build_activation_layer()
self.assertIsInstance(activation_layer, tf.keras.layers.Lambda)
self.assertEqual(activation_layer.function, tf.nn.relu6)
- def test_use_swish_activation(self):
- conv_hyperparams_text_proto = """
- regularizer {
- l2_regularizer {
- }
- }
- initializer {
- truncated_normal_initializer {
- }
- }
- activation: SWISH
- """
- conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
- text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
- scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
- is_training=True)
- scope = scope_fn()
- conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
- self.assertEqual(conv_scope_arguments['activation_fn'], tf.nn.swish)
-
def test_use_swish_activation_keras(self):
conv_hyperparams_text_proto = """
regularizer {
@@ -587,7 +813,7 @@ class HyperparamsBuilderTest(tf.test.TestCase):
text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
keras_config = hyperparams_builder.KerasLayerHyperparams(
conv_hyperparams_proto)
- self.assertEqual(keras_config.params()['activation'], None)
+ self.assertIsNone(keras_config.params()['activation'])
self.assertEqual(
keras_config.params(include_activation=True)['activation'], tf.nn.swish)
activation_layer = keras_config.build_activation_layer()
@@ -613,43 +839,6 @@ class HyperparamsBuilderTest(tf.test.TestCase):
new_params = keras_config.params(activation=tf.nn.relu)
self.assertEqual(new_params['activation'], tf.nn.relu)
- def _assert_variance_in_range(self, initializer, shape, variance,
- tol=1e-2):
- with tf.Graph().as_default() as g:
- with self.test_session(graph=g) as sess:
- var = tf.get_variable(
- name='test',
- shape=shape,
- dtype=tf.float32,
- initializer=initializer)
- sess.run(tf.global_variables_initializer())
- values = sess.run(var)
- self.assertAllClose(np.var(values), variance, tol, tol)
-
- def test_variance_in_range_with_variance_scaling_initializer_fan_in(self):
- conv_hyperparams_text_proto = """
- regularizer {
- l2_regularizer {
- }
- }
- initializer {
- variance_scaling_initializer {
- factor: 2.0
- mode: FAN_IN
- uniform: false
- }
- }
- """
- conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
- text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
- scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
- is_training=True)
- scope = scope_fn()
- conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
- initializer = conv_scope_arguments['weights_initializer']
- self._assert_variance_in_range(initializer, shape=[100, 40],
- variance=2. / 100.)
-
def test_variance_in_range_with_variance_scaling_initializer_fan_in_keras(
self):
conv_hyperparams_text_proto = """
@@ -673,30 +862,6 @@ class HyperparamsBuilderTest(tf.test.TestCase):
self._assert_variance_in_range(initializer, shape=[100, 40],
variance=2. / 100.)
- def test_variance_in_range_with_variance_scaling_initializer_fan_out(self):
- conv_hyperparams_text_proto = """
- regularizer {
- l2_regularizer {
- }
- }
- initializer {
- variance_scaling_initializer {
- factor: 2.0
- mode: FAN_OUT
- uniform: false
- }
- }
- """
- conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
- text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
- scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
- is_training=True)
- scope = scope_fn()
- conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
- initializer = conv_scope_arguments['weights_initializer']
- self._assert_variance_in_range(initializer, shape=[100, 40],
- variance=2. / 40.)
-
def test_variance_in_range_with_variance_scaling_initializer_fan_out_keras(
self):
conv_hyperparams_text_proto = """
@@ -720,30 +885,6 @@ class HyperparamsBuilderTest(tf.test.TestCase):
self._assert_variance_in_range(initializer, shape=[100, 40],
variance=2. / 40.)
- def test_variance_in_range_with_variance_scaling_initializer_fan_avg(self):
- conv_hyperparams_text_proto = """
- regularizer {
- l2_regularizer {
- }
- }
- initializer {
- variance_scaling_initializer {
- factor: 2.0
- mode: FAN_AVG
- uniform: false
- }
- }
- """
- conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
- text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
- scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
- is_training=True)
- scope = scope_fn()
- conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
- initializer = conv_scope_arguments['weights_initializer']
- self._assert_variance_in_range(initializer, shape=[100, 40],
- variance=4. / (100. + 40.))
-
def test_variance_in_range_with_variance_scaling_initializer_fan_avg_keras(
self):
conv_hyperparams_text_proto = """
@@ -767,30 +908,6 @@ class HyperparamsBuilderTest(tf.test.TestCase):
self._assert_variance_in_range(initializer, shape=[100, 40],
variance=4. / (100. + 40.))
- def test_variance_in_range_with_variance_scaling_initializer_uniform(self):
- conv_hyperparams_text_proto = """
- regularizer {
- l2_regularizer {
- }
- }
- initializer {
- variance_scaling_initializer {
- factor: 2.0
- mode: FAN_IN
- uniform: true
- }
- }
- """
- conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
- text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
- scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
- is_training=True)
- scope = scope_fn()
- conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
- initializer = conv_scope_arguments['weights_initializer']
- self._assert_variance_in_range(initializer, shape=[100, 40],
- variance=2. / 100.)
-
def test_variance_in_range_with_variance_scaling_initializer_uniform_keras(
self):
conv_hyperparams_text_proto = """
@@ -814,29 +931,6 @@ class HyperparamsBuilderTest(tf.test.TestCase):
self._assert_variance_in_range(initializer, shape=[100, 40],
variance=2. / 100.)
- def test_variance_in_range_with_truncated_normal_initializer(self):
- conv_hyperparams_text_proto = """
- regularizer {
- l2_regularizer {
- }
- }
- initializer {
- truncated_normal_initializer {
- mean: 0.0
- stddev: 0.8
- }
- }
- """
- conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
- text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
- scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
- is_training=True)
- scope = scope_fn()
- conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
- initializer = conv_scope_arguments['weights_initializer']
- self._assert_variance_in_range(initializer, shape=[100, 40],
- variance=0.49, tol=1e-1)
-
def test_variance_in_range_with_truncated_normal_initializer_keras(self):
conv_hyperparams_text_proto = """
regularizer {
@@ -858,29 +952,6 @@ class HyperparamsBuilderTest(tf.test.TestCase):
self._assert_variance_in_range(initializer, shape=[100, 40],
variance=0.49, tol=1e-1)
- def test_variance_in_range_with_random_normal_initializer(self):
- conv_hyperparams_text_proto = """
- regularizer {
- l2_regularizer {
- }
- }
- initializer {
- random_normal_initializer {
- mean: 0.0
- stddev: 0.8
- }
- }
- """
- conv_hyperparams_proto = hyperparams_pb2.Hyperparams()
- text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams_proto)
- scope_fn = hyperparams_builder.build(conv_hyperparams_proto,
- is_training=True)
- scope = scope_fn()
- conv_scope_arguments = scope[_get_scope_key(slim.conv2d)]
- initializer = conv_scope_arguments['weights_initializer']
- self._assert_variance_in_range(initializer, shape=[100, 40],
- variance=0.64, tol=1e-1)
-
def test_variance_in_range_with_random_normal_initializer_keras(self):
conv_hyperparams_text_proto = """
regularizer {
@@ -902,6 +973,5 @@ class HyperparamsBuilderTest(tf.test.TestCase):
self._assert_variance_in_range(initializer, shape=[100, 40],
variance=0.64, tol=1e-1)
-
if __name__ == '__main__':
tf.test.main()
diff --git a/research/object_detection/builders/image_resizer_builder_test.py b/research/object_detection/builders/image_resizer_builder_test.py
index 62ea5dc9b1245589f7b0bf132ec82d1bf8a0c392..dfc456eab1da1ea7952d17be4d14fab9ca8bf9a4 100644
--- a/research/object_detection/builders/image_resizer_builder_test.py
+++ b/research/object_detection/builders/image_resizer_builder_test.py
@@ -18,21 +18,23 @@ import tensorflow.compat.v1 as tf
from google.protobuf import text_format
from object_detection.builders import image_resizer_builder
from object_detection.protos import image_resizer_pb2
+from object_detection.utils import test_case
-class ImageResizerBuilderTest(tf.test.TestCase):
+class ImageResizerBuilderTest(test_case.TestCase):
def _shape_of_resized_random_image_given_text_proto(self, input_shape,
text_proto):
image_resizer_config = image_resizer_pb2.ImageResizer()
text_format.Merge(text_proto, image_resizer_config)
image_resizer_fn = image_resizer_builder.build(image_resizer_config)
- images = tf.cast(
- tf.random_uniform(input_shape, minval=0, maxval=255, dtype=tf.int32),
- dtype=tf.float32)
- resized_images, _ = image_resizer_fn(images)
- with self.test_session() as sess:
- return sess.run(resized_images).shape
+ def graph_fn():
+ images = tf.cast(
+ tf.random_uniform(input_shape, minval=0, maxval=255, dtype=tf.int32),
+ dtype=tf.float32)
+ resized_images, _ = image_resizer_fn(images)
+ return resized_images
+ return self.execute_cpu(graph_fn, []).shape
def test_build_keep_aspect_ratio_resizer_returns_expected_shape(self):
image_resizer_text_proto = """
@@ -125,10 +127,10 @@ class ImageResizerBuilderTest(tf.test.TestCase):
image_resizer_config = image_resizer_pb2.ImageResizer()
text_format.Merge(text_proto, image_resizer_config)
image_resizer_fn = image_resizer_builder.build(image_resizer_config)
- image_placeholder = tf.placeholder(tf.uint8, [1, None, None, 3])
- resized_image, _ = image_resizer_fn(image_placeholder)
- with self.test_session() as sess:
- return sess.run(resized_image, feed_dict={image_placeholder: image})
+ def graph_fn(image):
+ resized_image, _ = image_resizer_fn(image)
+ return resized_image
+ return self.execute_cpu(graph_fn, [image])
def test_fixed_shape_resizer_nearest_neighbor_method(self):
image_resizer_text_proto = """
diff --git a/research/object_detection/builders/input_reader_builder.py b/research/object_detection/builders/input_reader_builder.py
index 0ab9c05b72653dddcec94ca3857928e662cdd9eb..c7755177e70d528984ea425f21fb9afaf11d9eaa 100644
--- a/research/object_detection/builders/input_reader_builder.py
+++ b/research/object_detection/builders/input_reader_builder.py
@@ -29,19 +29,12 @@ from __future__ import division
from __future__ import print_function
import tensorflow.compat.v1 as tf
+import tf_slim as slim
from object_detection.data_decoders import tf_example_decoder
from object_detection.data_decoders import tf_sequence_example_decoder
from object_detection.protos import input_reader_pb2
-# pylint: disable=g-import-not-at-top
-try:
- import tf_slim as slim
-except ImportError:
- # TF 2.0 doesn't ship with contrib.
- pass
-# pylint: enable=g-import-not-at-top
-
parallel_reader = slim.parallel_reader
@@ -82,14 +75,14 @@ def build(input_reader_config):
if input_reader_config.HasField('label_map_path'):
label_map_proto_file = input_reader_config.label_map_path
input_type = input_reader_config.input_type
- if input_type == input_reader_pb2.InputType.TF_EXAMPLE:
+ if input_type == input_reader_pb2.InputType.Value('TF_EXAMPLE'):
decoder = tf_example_decoder.TfExampleDecoder(
load_instance_masks=input_reader_config.load_instance_masks,
instance_mask_type=input_reader_config.mask_type,
label_map_proto_file=label_map_proto_file,
load_context_features=input_reader_config.load_context_features)
return decoder.decode(string_tensor)
- elif input_type == input_reader_pb2.InputType.TF_SEQUENCE_EXAMPLE:
+ elif input_type == input_reader_pb2.InputType.Value('TF_SEQUENCE_EXAMPLE'):
decoder = tf_sequence_example_decoder.TfSequenceExampleDecoder(
label_map_proto_file=label_map_proto_file,
load_context_features=input_reader_config.load_context_features)
diff --git a/research/object_detection/builders/input_reader_builder_test.py b/research/object_detection/builders/input_reader_builder_tf1_test.py
similarity index 98%
rename from research/object_detection/builders/input_reader_builder_test.py
rename to research/object_detection/builders/input_reader_builder_tf1_test.py
index 14a8eb819304e2f9db655067d0d817a22386f4ba..6049128b03f55501ddcd2a1b3334821800d826a1 100644
--- a/research/object_detection/builders/input_reader_builder_test.py
+++ b/research/object_detection/builders/input_reader_builder_tf1_test.py
@@ -16,6 +16,7 @@
"""Tests for input_reader_builder."""
import os
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
@@ -26,6 +27,7 @@ from object_detection.core import standard_fields as fields
from object_detection.dataset_tools import seq_example_util
from object_detection.protos import input_reader_pb2
from object_detection.utils import dataset_util
+from object_detection.utils import tf_version
def _get_labelmap_path():
@@ -35,6 +37,7 @@ def _get_labelmap_path():
'pet_label_map.pbtxt')
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class InputReaderBuilderTest(tf.test.TestCase):
def create_tf_record(self):
diff --git a/research/object_detection/builders/matcher_builder.py b/research/object_detection/builders/matcher_builder.py
index d334f435372984eb78265d72b2bcdf63c45bde5b..086f74b5c45f81cd555207f0ad593a52a0c0f307 100644
--- a/research/object_detection/builders/matcher_builder.py
+++ b/research/object_detection/builders/matcher_builder.py
@@ -16,8 +16,11 @@
"""A function to build an object detection matcher from configuration."""
from object_detection.matchers import argmax_matcher
-from object_detection.matchers import bipartite_matcher
from object_detection.protos import matcher_pb2
+from object_detection.utils import tf_version
+
+if tf_version.is_tf1():
+ from object_detection.matchers import bipartite_matcher # pylint: disable=g-import-not-at-top
def build(matcher_config):
@@ -48,6 +51,8 @@ def build(matcher_config):
force_match_for_each_row=matcher.force_match_for_each_row,
use_matmul_gather=matcher.use_matmul_gather)
if matcher_config.WhichOneof('matcher_oneof') == 'bipartite_matcher':
+ if tf_version.is_tf2():
+ raise ValueError('bipartite_matcher is not supported in TF 2.X')
matcher = matcher_config.bipartite_matcher
return bipartite_matcher.GreedyBipartiteMatcher(matcher.use_matmul_gather)
raise ValueError('Empty matcher.')
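A caller-side sketch of the guard added above, assuming only what the change itself shows: under TF2 the bipartite matcher import is skipped and `build()` raises `ValueError`, while under TF1 it still returns a `GreedyBipartiteMatcher` (the config text mirrors the test below).

from google.protobuf import text_format
from object_detection.builders import matcher_builder
from object_detection.protos import matcher_pb2
from object_detection.utils import tf_version

matcher_proto = matcher_pb2.Matcher()
text_format.Merge('bipartite_matcher {}', matcher_proto)

if tf_version.is_tf2():
  try:
    matcher_builder.build(matcher_proto)
  except ValueError as e:
    print(e)  # bipartite_matcher is not supported in TF 2.X
else:
  matcher = matcher_builder.build(matcher_proto)
  print(type(matcher).__name__)  # GreedyBipartiteMatcher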
diff --git a/research/object_detection/builders/matcher_builder_test.py b/research/object_detection/builders/matcher_builder_test.py
index 451e1f9cc468fd828c453cad59b74ce67d63d092..cfa55ff94fb7a12dbf78787ffbbf762d1890e3bc 100644
--- a/research/object_detection/builders/matcher_builder_test.py
+++ b/research/object_detection/builders/matcher_builder_test.py
@@ -20,11 +20,15 @@ import tensorflow.compat.v1 as tf
from google.protobuf import text_format
from object_detection.builders import matcher_builder
from object_detection.matchers import argmax_matcher
-from object_detection.matchers import bipartite_matcher
from object_detection.protos import matcher_pb2
+from object_detection.utils import test_case
+from object_detection.utils import tf_version
+if tf_version.is_tf1():
+ from object_detection.matchers import bipartite_matcher # pylint: disable=g-import-not-at-top
-class MatcherBuilderTest(tf.test.TestCase):
+
+class MatcherBuilderTest(test_case.TestCase):
def test_build_arg_max_matcher_with_defaults(self):
matcher_text_proto = """
@@ -34,7 +38,7 @@ class MatcherBuilderTest(tf.test.TestCase):
matcher_proto = matcher_pb2.Matcher()
text_format.Merge(matcher_text_proto, matcher_proto)
matcher_object = matcher_builder.build(matcher_proto)
- self.assertTrue(isinstance(matcher_object, argmax_matcher.ArgMaxMatcher))
+ self.assertIsInstance(matcher_object, argmax_matcher.ArgMaxMatcher)
self.assertAlmostEqual(matcher_object._matched_threshold, 0.5)
self.assertAlmostEqual(matcher_object._unmatched_threshold, 0.5)
self.assertTrue(matcher_object._negatives_lower_than_unmatched)
@@ -49,7 +53,7 @@ class MatcherBuilderTest(tf.test.TestCase):
matcher_proto = matcher_pb2.Matcher()
text_format.Merge(matcher_text_proto, matcher_proto)
matcher_object = matcher_builder.build(matcher_proto)
- self.assertTrue(isinstance(matcher_object, argmax_matcher.ArgMaxMatcher))
+ self.assertIsInstance(matcher_object, argmax_matcher.ArgMaxMatcher)
self.assertEqual(matcher_object._matched_threshold, None)
self.assertEqual(matcher_object._unmatched_threshold, None)
self.assertTrue(matcher_object._negatives_lower_than_unmatched)
@@ -68,7 +72,7 @@ class MatcherBuilderTest(tf.test.TestCase):
matcher_proto = matcher_pb2.Matcher()
text_format.Merge(matcher_text_proto, matcher_proto)
matcher_object = matcher_builder.build(matcher_proto)
- self.assertTrue(isinstance(matcher_object, argmax_matcher.ArgMaxMatcher))
+ self.assertIsInstance(matcher_object, argmax_matcher.ArgMaxMatcher)
self.assertAlmostEqual(matcher_object._matched_threshold, 0.7)
self.assertAlmostEqual(matcher_object._unmatched_threshold, 0.3)
self.assertFalse(matcher_object._negatives_lower_than_unmatched)
@@ -76,6 +80,8 @@ class MatcherBuilderTest(tf.test.TestCase):
self.assertTrue(matcher_object._use_matmul_gather)
def test_build_bipartite_matcher(self):
+ if tf_version.is_tf2():
+ self.skipTest('BipartiteMatcher unsupported in TF 2.X. Skipping.')
matcher_text_proto = """
bipartite_matcher {
}
@@ -83,8 +89,8 @@ class MatcherBuilderTest(tf.test.TestCase):
matcher_proto = matcher_pb2.Matcher()
text_format.Merge(matcher_text_proto, matcher_proto)
matcher_object = matcher_builder.build(matcher_proto)
- self.assertTrue(
- isinstance(matcher_object, bipartite_matcher.GreedyBipartiteMatcher))
+ self.assertIsInstance(matcher_object,
+ bipartite_matcher.GreedyBipartiteMatcher)
def test_raise_error_on_empty_matcher(self):
matcher_text_proto = """
diff --git a/research/object_detection/builders/model_builder.py b/research/object_detection/builders/model_builder.py
index 481cd694bb9916942b472a37427033855155b8d9..b3d41e3fce0adef4a97835380777338784af5995 100644
--- a/research/object_detection/builders/model_builder.py
+++ b/research/object_detection/builders/model_builder.py
@@ -28,6 +28,8 @@ from object_detection.builders import region_similarity_calculator_builder as si
from object_detection.core import balanced_positive_negative_sampler as sampler
from object_detection.core import post_processing
from object_detection.core import target_assigner
+from object_detection.meta_architectures import center_net_meta_arch
+from object_detection.meta_architectures import context_rcnn_meta_arch
from object_detection.meta_architectures import faster_rcnn_meta_arch
from object_detection.meta_architectures import rfcn_meta_arch
from object_detection.meta_architectures import ssd_meta_arch
@@ -47,6 +49,7 @@ from object_detection.utils import tf_version
if tf_version.is_tf2():
from object_detection.models import center_net_hourglass_feature_extractor
from object_detection.models import center_net_resnet_feature_extractor
+ from object_detection.models import center_net_resnet_v1_fpn_feature_extractor
from object_detection.models import faster_rcnn_inception_resnet_v2_keras_feature_extractor as frcnn_inc_res_keras
from object_detection.models import faster_rcnn_resnet_keras_feature_extractor as frcnn_resnet_keras
from object_detection.models import ssd_resnet_v1_fpn_keras_feature_extractor as ssd_resnet_v1_fpn_keras
@@ -79,6 +82,7 @@ if tf_version.is_tf1():
from object_detection.models.ssd_mobiledet_feature_extractor import SSDMobileDetCPUFeatureExtractor
from object_detection.models.ssd_mobiledet_feature_extractor import SSDMobileDetDSPFeatureExtractor
from object_detection.models.ssd_mobiledet_feature_extractor import SSDMobileDetEdgeTPUFeatureExtractor
+ from object_detection.models.ssd_mobiledet_feature_extractor import SSDMobileDetGPUFeatureExtractor
from object_detection.models.ssd_pnasnet_feature_extractor import SSDPNASNetFeatureExtractor
from object_detection.predictors import rfcn_box_predictor
# pylint: enable=g-import-not-at-top
@@ -109,8 +113,12 @@ if tf_version.is_tf2():
}
CENTER_NET_EXTRACTOR_FUNCTION_MAP = {
- 'resnet_v2_101': center_net_resnet_feature_extractor.resnet_v2_101,
'resnet_v2_50': center_net_resnet_feature_extractor.resnet_v2_50,
+ 'resnet_v2_101': center_net_resnet_feature_extractor.resnet_v2_101,
+ 'resnet_v1_50_fpn':
+ center_net_resnet_v1_fpn_feature_extractor.resnet_v1_50_fpn,
+ 'resnet_v1_101_fpn':
+ center_net_resnet_v1_fpn_feature_extractor.resnet_v1_101_fpn,
'hourglass_104': center_net_hourglass_feature_extractor.hourglass_104,
}
@@ -160,9 +168,14 @@ if tf_version.is_tf1():
EmbeddedSSDMobileNetV1FeatureExtractor,
'ssd_pnasnet':
SSDPNASNetFeatureExtractor,
- 'ssd_mobiledet_cpu': SSDMobileDetCPUFeatureExtractor,
- 'ssd_mobiledet_dsp': SSDMobileDetDSPFeatureExtractor,
- 'ssd_mobiledet_edgetpu': SSDMobileDetEdgeTPUFeatureExtractor,
+ 'ssd_mobiledet_cpu':
+ SSDMobileDetCPUFeatureExtractor,
+ 'ssd_mobiledet_dsp':
+ SSDMobileDetDSPFeatureExtractor,
+ 'ssd_mobiledet_edgetpu':
+ SSDMobileDetEdgeTPUFeatureExtractor,
+ 'ssd_mobiledet_gpu':
+ SSDMobileDetGPUFeatureExtractor,
}
FASTER_RCNN_FEATURE_EXTRACTOR_CLASS_MAP = {
@@ -767,7 +780,9 @@ def keypoint_proto_to_params(kp_config, keypoint_map_dict):
unmatched_keypoint_score=kp_config.unmatched_keypoint_score,
box_scale=kp_config.box_scale,
candidate_search_scale=kp_config.candidate_search_scale,
- candidate_ranking_mode=kp_config.candidate_ranking_mode)
+ candidate_ranking_mode=kp_config.candidate_ranking_mode,
+ offset_peak_radius=kp_config.offset_peak_radius,
+ per_keypoint_offset=kp_config.per_keypoint_offset)
def object_detection_proto_to_params(od_config):
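The new CenterNet ResNet-v1 FPN entries are selected by name through the `feature_extractor.type` field. A hedged sketch of such a config, reusing the proto skeleton from the TF2 test added below; values other than the extractor type are illustrative, and the remaining required fields such as `object_center_params` must still be populated before calling `model_builder.build`, as that test does.

from google.protobuf import text_format
from object_detection.protos import model_pb2

proto_txt = """
center_net {
  num_classes: 10
  feature_extractor {
    type: "resnet_v1_50_fpn"
  }
  image_resizer {
    keep_aspect_ratio_resizer {
      min_dimension: 512
      max_dimension: 512
      pad_to_max_dimension: true
    }
  }
}
"""
# Maps to center_net_resnet_v1_fpn_feature_extractor.resnet_v1_50_fpn via
# CENTER_NET_EXTRACTOR_FUNCTION_MAP.
config = text_format.Merge(proto_txt, model_pb2.DetectionModel())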
diff --git a/research/object_detection/builders/model_builder_tf1_test.py b/research/object_detection/builders/model_builder_tf1_test.py
index a4d2913f52fcbf551da91596ff1077caf02d94f1..083275ac466250220dd532f52288bab9a5a66daf 100644
--- a/research/object_detection/builders/model_builder_tf1_test.py
+++ b/research/object_detection/builders/model_builder_tf1_test.py
@@ -14,16 +14,19 @@
# limitations under the License.
# ==============================================================================
"""Tests for model_builder under TensorFlow 1.X."""
-
+import unittest
from absl.testing import parameterized
import tensorflow.compat.v1 as tf
from object_detection.builders import model_builder
from object_detection.builders import model_builder_test
+from object_detection.meta_architectures import context_rcnn_meta_arch
from object_detection.meta_architectures import ssd_meta_arch
from object_detection.protos import losses_pb2
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class ModelBuilderTF1Test(model_builder_test.ModelBuilderTest):
def default_ssd_feature_extractor(self):
@@ -39,6 +42,14 @@ class ModelBuilderTF1Test(model_builder_test.ModelBuilderTest):
return model_builder.FASTER_RCNN_FEATURE_EXTRACTOR_CLASS_MAP
+ @parameterized.parameters(True, False)
+ def test_create_context_rcnn_from_config_with_params(self, is_training):
+ model_proto = self.create_default_faster_rcnn_model_proto()
+ model_proto.faster_rcnn.context_config.attention_bottleneck_dimension = 10
+ model_proto.faster_rcnn.context_config.attention_temperature = 0.5
+ model = model_builder.build(model_proto, is_training=is_training)
+ self.assertIsInstance(model, context_rcnn_meta_arch.ContextRCNNMetaArch)
+
if __name__ == '__main__':
tf.test.main()
diff --git a/research/object_detection/builders/model_builder_tf2_test.py b/research/object_detection/builders/model_builder_tf2_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..c2cd237292ab8cb534aa760380fb31e7a68f1e43
--- /dev/null
+++ b/research/object_detection/builders/model_builder_tf2_test.py
@@ -0,0 +1,261 @@
+# Lint as: python2, python3
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Tests for model_builder under TensorFlow 2.X."""
+
+import os
+import unittest
+
+import tensorflow.compat.v1 as tf
+
+from google.protobuf import text_format
+from object_detection.builders import model_builder
+from object_detection.builders import model_builder_test
+from object_detection.core import losses
+from object_detection.models import center_net_resnet_feature_extractor
+from object_detection.protos import center_net_pb2
+from object_detection.protos import model_pb2
+from object_detection.utils import tf_version
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class ModelBuilderTF2Test(model_builder_test.ModelBuilderTest):
+
+ def default_ssd_feature_extractor(self):
+ return 'ssd_resnet50_v1_fpn_keras'
+
+ def default_faster_rcnn_feature_extractor(self):
+ return 'faster_rcnn_resnet101_keras'
+
+ def ssd_feature_extractors(self):
+ return model_builder.SSD_KERAS_FEATURE_EXTRACTOR_CLASS_MAP
+
+ def faster_rcnn_feature_extractors(self):
+ return model_builder.FASTER_RCNN_KERAS_FEATURE_EXTRACTOR_CLASS_MAP
+
+ def get_fake_label_map_file_path(self):
+ keypoint_spec_text = """
+ item {
+ name: "/m/01g317"
+ id: 1
+ display_name: "person"
+ keypoints {
+ id: 0
+ label: 'nose'
+ }
+ keypoints {
+ id: 1
+ label: 'left_shoulder'
+ }
+ keypoints {
+ id: 2
+ label: 'right_shoulder'
+ }
+ keypoints {
+ id: 3
+ label: 'hip'
+ }
+ }
+ """
+ keypoint_label_map_path = os.path.join(
+ self.get_temp_dir(), 'keypoint_label_map')
+ with tf.gfile.Open(keypoint_label_map_path, 'wb') as f:
+ f.write(keypoint_spec_text)
+ return keypoint_label_map_path
+
+ def get_fake_keypoint_proto(self):
+ task_proto_txt = """
+ task_name: "human_pose"
+ task_loss_weight: 0.9
+ keypoint_regression_loss_weight: 1.0
+ keypoint_heatmap_loss_weight: 0.1
+ keypoint_offset_loss_weight: 0.5
+ heatmap_bias_init: 2.14
+ keypoint_class_name: "/m/01g317"
+ loss {
+ classification_loss {
+ penalty_reduced_logistic_focal_loss {
+ alpha: 3.0
+ beta: 4.0
+ }
+ }
+ localization_loss {
+ l1_localization_loss {
+ }
+ }
+ }
+ keypoint_label_to_std {
+ key: "nose"
+ value: 0.3
+ }
+ keypoint_label_to_std {
+ key: "hip"
+ value: 0.0
+ }
+ keypoint_candidate_score_threshold: 0.3
+ num_candidates_per_keypoint: 12
+ peak_max_pool_kernel_size: 5
+ unmatched_keypoint_score: 0.05
+ box_scale: 1.7
+ candidate_search_scale: 0.2
+ candidate_ranking_mode: "score_distance_ratio"
+ offset_peak_radius: 3
+ per_keypoint_offset: true
+ """
+ config = text_format.Merge(task_proto_txt,
+ center_net_pb2.CenterNet.KeypointEstimation())
+ return config
+
+ def get_fake_object_center_proto(self):
+ proto_txt = """
+ object_center_loss_weight: 0.5
+ heatmap_bias_init: 3.14
+ min_box_overlap_iou: 0.2
+ max_box_predictions: 15
+ classification_loss {
+ penalty_reduced_logistic_focal_loss {
+ alpha: 3.0
+ beta: 4.0
+ }
+ }
+ """
+ return text_format.Merge(proto_txt,
+ center_net_pb2.CenterNet.ObjectCenterParams())
+
+ def get_fake_object_detection_proto(self):
+ proto_txt = """
+ task_loss_weight: 0.5
+ offset_loss_weight: 0.1
+ scale_loss_weight: 0.2
+ localization_loss {
+ l1_localization_loss {
+ }
+ }
+ """
+ return text_format.Merge(proto_txt,
+ center_net_pb2.CenterNet.ObjectDetection())
+
+ def get_fake_mask_proto(self):
+ proto_txt = """
+ task_loss_weight: 0.7
+ classification_loss {
+ weighted_softmax {}
+ }
+ mask_height: 8
+ mask_width: 8
+ score_threshold: 0.7
+ heatmap_bias_init: -2.0
+ """
+ return text_format.Merge(proto_txt,
+ center_net_pb2.CenterNet.MaskEstimation())
+
+ def test_create_center_net_model(self):
+ """Test building a CenterNet model from proto txt."""
+ proto_txt = """
+ center_net {
+ num_classes: 10
+ feature_extractor {
+ type: "resnet_v2_101"
+ channel_stds: [4, 5, 6]
+ bgr_ordering: true
+ }
+ image_resizer {
+ keep_aspect_ratio_resizer {
+ min_dimension: 512
+ max_dimension: 512
+ pad_to_max_dimension: true
+ }
+ }
+ }
+ """
+ # Set up the configuration proto.
+ config = text_format.Merge(proto_txt, model_pb2.DetectionModel())
+ config.center_net.object_center_params.CopyFrom(
+ self.get_fake_object_center_proto())
+ config.center_net.object_detection_task.CopyFrom(
+ self.get_fake_object_detection_proto())
+ config.center_net.keypoint_estimation_task.append(
+ self.get_fake_keypoint_proto())
+ config.center_net.keypoint_label_map_path = (
+ self.get_fake_label_map_file_path())
+ config.center_net.mask_estimation_task.CopyFrom(
+ self.get_fake_mask_proto())
+
+ # Build the model from the configuration.
+ model = model_builder.build(config, is_training=True)
+
+ # Check object center related parameters.
+ self.assertEqual(model._num_classes, 10)
+ self.assertIsInstance(model._center_params.classification_loss,
+ losses.PenaltyReducedLogisticFocalLoss)
+ self.assertEqual(model._center_params.classification_loss._alpha, 3.0)
+ self.assertEqual(model._center_params.classification_loss._beta, 4.0)
+ self.assertAlmostEqual(model._center_params.min_box_overlap_iou, 0.2)
+ self.assertAlmostEqual(
+ model._center_params.heatmap_bias_init, 3.14, places=4)
+ self.assertEqual(model._center_params.max_box_predictions, 15)
+
+ # Check object detection related parameters.
+ self.assertAlmostEqual(model._od_params.offset_loss_weight, 0.1)
+ self.assertAlmostEqual(model._od_params.scale_loss_weight, 0.2)
+ self.assertAlmostEqual(model._od_params.task_loss_weight, 0.5)
+ self.assertIsInstance(model._od_params.localization_loss,
+ losses.L1LocalizationLoss)
+
+ # Check keypoint estimation related parameters.
+ kp_params = model._kp_params_dict['human_pose']
+ self.assertAlmostEqual(kp_params.task_loss_weight, 0.9)
+ self.assertAlmostEqual(kp_params.keypoint_regression_loss_weight, 1.0)
+ self.assertAlmostEqual(kp_params.keypoint_offset_loss_weight, 0.5)
+ self.assertAlmostEqual(kp_params.heatmap_bias_init, 2.14, places=4)
+ self.assertEqual(kp_params.classification_loss._alpha, 3.0)
+ self.assertEqual(kp_params.keypoint_indices, [0, 1, 2, 3])
+ self.assertEqual(kp_params.keypoint_labels,
+ ['nose', 'left_shoulder', 'right_shoulder', 'hip'])
+ self.assertAllClose(kp_params.keypoint_std_dev, [0.3, 1.0, 1.0, 0.0])
+ self.assertEqual(kp_params.classification_loss._beta, 4.0)
+ self.assertIsInstance(kp_params.localization_loss,
+ losses.L1LocalizationLoss)
+ self.assertAlmostEqual(kp_params.keypoint_candidate_score_threshold, 0.3)
+ self.assertEqual(kp_params.num_candidates_per_keypoint, 12)
+ self.assertEqual(kp_params.peak_max_pool_kernel_size, 5)
+ self.assertAlmostEqual(kp_params.unmatched_keypoint_score, 0.05)
+ self.assertAlmostEqual(kp_params.box_scale, 1.7)
+ self.assertAlmostEqual(kp_params.candidate_search_scale, 0.2)
+ self.assertEqual(kp_params.candidate_ranking_mode, 'score_distance_ratio')
+ self.assertEqual(kp_params.offset_peak_radius, 3)
+ self.assertEqual(kp_params.per_keypoint_offset, True)
+
+ # Check mask related parameters.
+ self.assertAlmostEqual(model._mask_params.task_loss_weight, 0.7)
+ self.assertIsInstance(model._mask_params.classification_loss,
+ losses.WeightedSoftmaxClassificationLoss)
+ self.assertEqual(model._mask_params.mask_height, 8)
+ self.assertEqual(model._mask_params.mask_width, 8)
+ self.assertAlmostEqual(model._mask_params.score_threshold, 0.7)
+ self.assertAlmostEqual(
+ model._mask_params.heatmap_bias_init, -2.0, places=4)
+
+ # Check feature extractor parameters.
+ self.assertIsInstance(
+ model._feature_extractor,
+ center_net_resnet_feature_extractor.CenterNetResnetFeatureExtractor)
+ self.assertAllClose(model._feature_extractor._channel_means, [0, 0, 0])
+ self.assertAllClose(model._feature_extractor._channel_stds, [4, 5, 6])
+ self.assertTrue(model._feature_extractor._bgr_ordering)
+
+
+if __name__ == '__main__':
+ tf.test.main()
diff --git a/research/object_detection/builders/optimizer_builder.py b/research/object_detection/builders/optimizer_builder.py
index 548b5cdcf2cc15f53379377df9bc195c1283f9fd..d602bad1292e222b5cbc532a873299dd918ef011 100644
--- a/research/object_detection/builders/optimizer_builder.py
+++ b/research/object_detection/builders/optimizer_builder.py
@@ -17,10 +17,13 @@
import tensorflow.compat.v1 as tf
-
-from tensorflow.contrib import opt as tf_opt
from object_detection.utils import learning_schedules
+try:
+ from tensorflow.contrib import opt as tf_opt # pylint: disable=g-import-not-at-top
+except: # pylint: disable=bare-except
+ pass
+
def build_optimizers_tf_v1(optimizer_config, global_step=None):
"""Create a TF v1 compatible optimizer based on config.
diff --git a/research/object_detection/builders/optimizer_builder_tf1_test.py b/research/object_detection/builders/optimizer_builder_tf1_test.py
index 9a6d1e404f94181ebf68826f1708ba43949eda9b..350ecb84b11b3fbd87e584a5d8d23ae877089078 100644
--- a/research/object_detection/builders/optimizer_builder_tf1_test.py
+++ b/research/object_detection/builders/optimizer_builder_tf1_test.py
@@ -20,6 +20,7 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+import unittest
import six
import tensorflow.compat.v1 as tf
@@ -27,16 +28,15 @@ from google.protobuf import text_format
from object_detection.builders import optimizer_builder
from object_detection.protos import optimizer_pb2
+from object_detection.utils import tf_version
# pylint: disable=g-import-not-at-top
-try:
+if tf_version.is_tf1():
from tensorflow.contrib import opt as contrib_opt
-except ImportError:
- # TF 2.0 doesn't ship with contrib.
- pass
# pylint: enable=g-import-not-at-top
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class LearningRateBuilderTest(tf.test.TestCase):
def testBuildConstantLearningRate(self):
@@ -118,6 +118,7 @@ class LearningRateBuilderTest(tf.test.TestCase):
optimizer_builder._create_learning_rate(learning_rate_proto)
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class OptimizerBuilderTest(tf.test.TestCase):
def testBuildRMSPropOptimizer(self):
diff --git a/research/object_detection/builders/optimizer_builder_tf2_test.py b/research/object_detection/builders/optimizer_builder_tf2_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..2c555f9a0f4c22b7c27955c92eaa3655c8fae5c6
--- /dev/null
+++ b/research/object_detection/builders/optimizer_builder_tf2_test.py
@@ -0,0 +1,104 @@
+# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+"""Tests for optimizer_builder."""
+import unittest
+import tensorflow.compat.v1 as tf
+
+from google.protobuf import text_format
+
+from object_detection.builders import optimizer_builder
+from object_detection.protos import optimizer_pb2
+from object_detection.utils import tf_version
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class OptimizerBuilderV2Test(tf.test.TestCase):
+ """Test building optimizers in V2 mode."""
+
+ def testBuildRMSPropOptimizer(self):
+ optimizer_text_proto = """
+ rms_prop_optimizer: {
+ learning_rate: {
+ exponential_decay_learning_rate {
+ initial_learning_rate: 0.004
+ decay_steps: 800720
+ decay_factor: 0.95
+ }
+ }
+ momentum_optimizer_value: 0.9
+ decay: 0.9
+ epsilon: 1.0
+ }
+ use_moving_average: false
+ """
+ optimizer_proto = optimizer_pb2.Optimizer()
+ text_format.Merge(optimizer_text_proto, optimizer_proto)
+ optimizer, _ = optimizer_builder.build(optimizer_proto)
+ self.assertIsInstance(optimizer, tf.keras.optimizers.RMSprop)
+
+ def testBuildMomentumOptimizer(self):
+ optimizer_text_proto = """
+ momentum_optimizer: {
+ learning_rate: {
+ constant_learning_rate {
+ learning_rate: 0.001
+ }
+ }
+ momentum_optimizer_value: 0.99
+ }
+ use_moving_average: false
+ """
+ optimizer_proto = optimizer_pb2.Optimizer()
+ text_format.Merge(optimizer_text_proto, optimizer_proto)
+ optimizer, _ = optimizer_builder.build(optimizer_proto)
+ self.assertIsInstance(optimizer, tf.keras.optimizers.SGD)
+
+ def testBuildAdamOptimizer(self):
+ optimizer_text_proto = """
+ adam_optimizer: {
+ learning_rate: {
+ constant_learning_rate {
+ learning_rate: 0.002
+ }
+ }
+ }
+ use_moving_average: false
+ """
+ optimizer_proto = optimizer_pb2.Optimizer()
+ text_format.Merge(optimizer_text_proto, optimizer_proto)
+ optimizer, _ = optimizer_builder.build(optimizer_proto)
+ self.assertIsInstance(optimizer, tf.keras.optimizers.Adam)
+
+ def testMovingAverageOptimizerUnsupported(self):
+ optimizer_text_proto = """
+ adam_optimizer: {
+ learning_rate: {
+ constant_learning_rate {
+ learning_rate: 0.002
+ }
+ }
+ }
+ use_moving_average: True
+ """
+ optimizer_proto = optimizer_pb2.Optimizer()
+ text_format.Merge(optimizer_text_proto, optimizer_proto)
+ with self.assertRaises(ValueError):
+ optimizer_builder.build(optimizer_proto)
+
+
+if __name__ == '__main__':
+ tf.enable_v2_behavior()
+ tf.test.main()
diff --git a/research/object_detection/builders/post_processing_builder_test.py b/research/object_detection/builders/post_processing_builder_test.py
index d163aa8f2701df742e27aaa7225bf358f671e8e7..b7383c92f99637ebf660d40a6074c65b03abd3c5 100644
--- a/research/object_detection/builders/post_processing_builder_test.py
+++ b/research/object_detection/builders/post_processing_builder_test.py
@@ -19,9 +19,10 @@ import tensorflow.compat.v1 as tf
from google.protobuf import text_format
from object_detection.builders import post_processing_builder
from object_detection.protos import post_processing_pb2
+from object_detection.utils import test_case
-class PostProcessingBuilderTest(tf.test.TestCase):
+class PostProcessingBuilderTest(test_case.TestCase):
def test_build_non_max_suppressor_with_correct_parameters(self):
post_processing_text_proto = """
@@ -77,13 +78,12 @@ class PostProcessingBuilderTest(tf.test.TestCase):
_, score_converter = post_processing_builder.build(
post_processing_config)
self.assertEqual(score_converter.__name__, 'identity_with_logit_scale')
-
- inputs = tf.constant([1, 1], tf.float32)
- outputs = score_converter(inputs)
- with self.test_session() as sess:
- converted_scores = sess.run(outputs)
- expected_converted_scores = sess.run(inputs)
- self.assertAllClose(converted_scores, expected_converted_scores)
+ def graph_fn():
+ inputs = tf.constant([1, 1], tf.float32)
+ outputs = score_converter(inputs)
+ return outputs
+ converted_scores = self.execute_cpu(graph_fn, [])
+ self.assertAllClose(converted_scores, [1, 1])
def test_build_identity_score_converter_with_logit_scale(self):
post_processing_text_proto = """
@@ -95,12 +95,12 @@ class PostProcessingBuilderTest(tf.test.TestCase):
_, score_converter = post_processing_builder.build(post_processing_config)
self.assertEqual(score_converter.__name__, 'identity_with_logit_scale')
- inputs = tf.constant([1, 1], tf.float32)
- outputs = score_converter(inputs)
- with self.test_session() as sess:
- converted_scores = sess.run(outputs)
- expected_converted_scores = sess.run(tf.constant([.5, .5], tf.float32))
- self.assertAllClose(converted_scores, expected_converted_scores)
+ def graph_fn():
+ inputs = tf.constant([1, 1], tf.float32)
+ outputs = score_converter(inputs)
+ return outputs
+ converted_scores = self.execute_cpu(graph_fn, [])
+ self.assertAllClose(converted_scores, [.5, .5])
def test_build_sigmoid_score_converter(self):
post_processing_text_proto = """
@@ -153,12 +153,12 @@ class PostProcessingBuilderTest(tf.test.TestCase):
self.assertEqual(calibrated_score_conversion_fn.__name__,
'calibrate_with_function_approximation')
- input_scores = tf.constant([1, 1], tf.float32)
- outputs = calibrated_score_conversion_fn(input_scores)
- with self.test_session() as sess:
- calibrated_scores = sess.run(outputs)
- expected_calibrated_scores = sess.run(tf.constant([0.5, 0.5], tf.float32))
- self.assertAllClose(calibrated_scores, expected_calibrated_scores)
+ def graph_fn():
+ input_scores = tf.constant([1, 1], tf.float32)
+ outputs = calibrated_score_conversion_fn(input_scores)
+ return outputs
+ calibrated_scores = self.execute_cpu(graph_fn, [])
+ self.assertAllClose(calibrated_scores, [0.5, 0.5])
def test_build_temperature_scaling_calibrator(self):
post_processing_text_proto = """
@@ -174,12 +174,12 @@ class PostProcessingBuilderTest(tf.test.TestCase):
self.assertEqual(calibrated_score_conversion_fn.__name__,
'calibrate_with_temperature_scaling_calibration')
- input_scores = tf.constant([1, 1], tf.float32)
- outputs = calibrated_score_conversion_fn(input_scores)
- with self.test_session() as sess:
- calibrated_scores = sess.run(outputs)
- expected_calibrated_scores = sess.run(tf.constant([0.5, 0.5], tf.float32))
- self.assertAllClose(calibrated_scores, expected_calibrated_scores)
+ def graph_fn():
+ input_scores = tf.constant([1, 1], tf.float32)
+ outputs = calibrated_score_conversion_fn(input_scores)
+ return outputs
+ calibrated_scores = self.execute_cpu(graph_fn, [])
+ self.assertAllClose(calibrated_scores, [0.5, 0.5])
if __name__ == '__main__':
tf.test.main()
diff --git a/research/object_detection/builders/preprocessor_builder.py b/research/object_detection/builders/preprocessor_builder.py
index aa6a6bc963f8635827aa8776252889d6c5f8d0e9..fe59039b10e47740614e58f913de2d80c6bdce0a 100644
--- a/research/object_detection/builders/preprocessor_builder.py
+++ b/research/object_detection/builders/preprocessor_builder.py
@@ -151,6 +151,7 @@ def build(preprocessor_step_config):
{
'keypoint_flip_permutation': tuple(
config.keypoint_flip_permutation) or None,
+ 'probability': config.probability or None,
})
if step_type == 'random_vertical_flip':
@@ -159,10 +160,17 @@ def build(preprocessor_step_config):
{
'keypoint_flip_permutation': tuple(
config.keypoint_flip_permutation) or None,
+ 'probability': config.probability or None,
})
if step_type == 'random_rotation90':
- return (preprocessor.random_rotation90, {})
+ config = preprocessor_step_config.random_rotation90
+ return (preprocessor.random_rotation90,
+ {
+ 'keypoint_rot_permutation': tuple(
+ config.keypoint_rot_permutation) or None,
+ 'probability': config.probability or None,
+ })
if step_type == 'random_crop_image':
config = preprocessor_step_config.random_crop_image
diff --git a/research/object_detection/builders/preprocessor_builder_test.py b/research/object_detection/builders/preprocessor_builder_test.py
index 4c283238c59695dace4d769b0a0fe0941a6a027c..9e90344d0478229fa95355b53ecfa5f876325936 100644
--- a/research/object_detection/builders/preprocessor_builder_test.py
+++ b/research/object_detection/builders/preprocessor_builder_test.py
@@ -65,13 +65,15 @@ class PreprocessorBuilderTest(tf.test.TestCase):
keypoint_flip_permutation: 3
keypoint_flip_permutation: 5
keypoint_flip_permutation: 4
+ probability: 0.5
}
"""
preprocessor_proto = preprocessor_pb2.PreprocessingStep()
text_format.Merge(preprocessor_text_proto, preprocessor_proto)
function, args = preprocessor_builder.build(preprocessor_proto)
self.assertEqual(function, preprocessor.random_horizontal_flip)
- self.assertEqual(args, {'keypoint_flip_permutation': (1, 0, 2, 3, 5, 4)})
+ self.assertEqual(args, {'keypoint_flip_permutation': (1, 0, 2, 3, 5, 4),
+ 'probability': 0.5})
def test_build_random_vertical_flip(self):
preprocessor_text_proto = """
@@ -82,23 +84,32 @@ class PreprocessorBuilderTest(tf.test.TestCase):
keypoint_flip_permutation: 3
keypoint_flip_permutation: 5
keypoint_flip_permutation: 4
+ probability: 0.5
}
"""
preprocessor_proto = preprocessor_pb2.PreprocessingStep()
text_format.Merge(preprocessor_text_proto, preprocessor_proto)
function, args = preprocessor_builder.build(preprocessor_proto)
self.assertEqual(function, preprocessor.random_vertical_flip)
- self.assertEqual(args, {'keypoint_flip_permutation': (1, 0, 2, 3, 5, 4)})
+ self.assertEqual(args, {'keypoint_flip_permutation': (1, 0, 2, 3, 5, 4),
+ 'probability': 0.5})
def test_build_random_rotation90(self):
preprocessor_text_proto = """
- random_rotation90 {}
+ random_rotation90 {
+ keypoint_rot_permutation: 3
+ keypoint_rot_permutation: 0
+ keypoint_rot_permutation: 1
+ keypoint_rot_permutation: 2
+ probability: 0.5
+ }
"""
preprocessor_proto = preprocessor_pb2.PreprocessingStep()
text_format.Merge(preprocessor_text_proto, preprocessor_proto)
function, args = preprocessor_builder.build(preprocessor_proto)
self.assertEqual(function, preprocessor.random_rotation90)
- self.assertEqual(args, {})
+ self.assertEqual(args, {'keypoint_rot_permutation': (3, 0, 1, 2),
+ 'probability': 0.5})
def test_build_random_pixel_value_scale(self):
preprocessor_text_proto = """
diff --git a/research/object_detection/colab_tutorials/context_rcnn_tutorial.ipynb b/research/object_detection/colab_tutorials/context_rcnn_tutorial.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..b735cfbcea0e2c5b7e7c44e706e68a59d98b68ec
--- /dev/null
+++ b/research/object_detection/colab_tutorials/context_rcnn_tutorial.ipynb
@@ -0,0 +1,1500 @@
+{
+ "nbformat": 4,
+ "nbformat_minor": 0,
+ "metadata": {
+ "accelerator": "GPU",
+ "colab": {
+ "name": "context_rcnn_tutorial.ipynb",
+ "provenance": [],
+ "collapsed_sections": []
+ },
+ "kernelspec": {
+ "display_name": "Python 3",
+ "name": "python3"
+ }
+ },
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "jZc1kMel3sZP",
+ "colab_type": "text"
+ },
+ "source": [
+ "# Context R-CNN Demo\n",
+ "\n",
+ "
\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "XuHWvdag3_b9",
+ "colab_type": "text"
+ },
+ "source": [
+ " This notebook will walk you step by step through the process of using a pre-trained model to build up a contextual memory bank for a set of images, and then detect objects in those images+context using [Context R-CNN](https://arxiv.org/abs/1912.03538)."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "u0e-OOtn4hQ8",
+ "colab_type": "text"
+ },
+ "source": [
+ "# Setup"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "w-UrhxBw4iLA",
+ "colab_type": "text"
+ },
+ "source": [
+ "Important: If you're running on a local machine, be sure to follow the [installation instructions](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/installation.md). This notebook includes only what's necessary to run in Colab."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "SAqMxS4V4lqS",
+ "colab_type": "text"
+ },
+ "source": [
+ "### Install"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "BPkovrxF4o8n",
+ "colab_type": "code",
+ "outputId": "e1b8debc-ab73-4b3e-9e44-c86446c7cda1",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 785
+ }
+ },
+ "source": [
+ "!pip install -U --pre tensorflow==\"2.*\"\n",
+ "!pip install tf_slim"
+ ],
+ "execution_count": 1,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "Requirement already up-to-date: tensorflow==2.* in /usr/local/lib/python3.6/dist-packages (2.2.0)\n",
+ "Requirement already satisfied, skipping upgrade: scipy==1.4.1; python_version >= \"3\" in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.4.1)\n",
+ "Requirement already satisfied, skipping upgrade: protobuf>=3.8.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (3.10.0)\n",
+ "Requirement already satisfied, skipping upgrade: h5py<2.11.0,>=2.10.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (2.10.0)\n",
+ "Requirement already satisfied, skipping upgrade: opt-einsum>=2.3.2 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (3.2.1)\n",
+ "Requirement already satisfied, skipping upgrade: numpy<2.0,>=1.16.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.18.5)\n",
+ "Requirement already satisfied, skipping upgrade: wheel>=0.26; python_version >= \"3\" in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (0.34.2)\n",
+ "Requirement already satisfied, skipping upgrade: absl-py>=0.7.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (0.9.0)\n",
+ "Requirement already satisfied, skipping upgrade: tensorflow-estimator<2.3.0,>=2.2.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (2.2.0)\n",
+ "Requirement already satisfied, skipping upgrade: google-pasta>=0.1.8 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (0.2.0)\n",
+ "Requirement already satisfied, skipping upgrade: grpcio>=1.8.6 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.29.0)\n",
+ "Requirement already satisfied, skipping upgrade: tensorboard<2.3.0,>=2.2.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (2.2.2)\n",
+ "Requirement already satisfied, skipping upgrade: gast==0.3.3 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (0.3.3)\n",
+ "Requirement already satisfied, skipping upgrade: astunparse==1.6.3 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.6.3)\n",
+ "Requirement already satisfied, skipping upgrade: keras-preprocessing>=1.1.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.1.2)\n",
+ "Requirement already satisfied, skipping upgrade: termcolor>=1.1.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.1.0)\n",
+ "Requirement already satisfied, skipping upgrade: six>=1.12.0 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.12.0)\n",
+ "Requirement already satisfied, skipping upgrade: wrapt>=1.11.1 in /usr/local/lib/python3.6/dist-packages (from tensorflow==2.*) (1.12.1)\n",
+ "Requirement already satisfied, skipping upgrade: setuptools in /usr/local/lib/python3.6/dist-packages (from protobuf>=3.8.0->tensorflow==2.*) (47.1.1)\n",
+ "Requirement already satisfied, skipping upgrade: google-auth<2,>=1.6.3 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (1.7.2)\n",
+ "Requirement already satisfied, skipping upgrade: werkzeug>=0.11.15 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (1.0.1)\n",
+ "Requirement already satisfied, skipping upgrade: requests<3,>=2.21.0 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (2.23.0)\n",
+ "Requirement already satisfied, skipping upgrade: google-auth-oauthlib<0.5,>=0.4.1 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (0.4.1)\n",
+ "Requirement already satisfied, skipping upgrade: markdown>=2.6.8 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (3.2.2)\n",
+ "Requirement already satisfied, skipping upgrade: tensorboard-plugin-wit>=1.6.0 in /usr/local/lib/python3.6/dist-packages (from tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (1.6.0.post3)\n",
+ "Requirement already satisfied, skipping upgrade: cachetools<3.2,>=2.0.0 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (3.1.1)\n",
+ "Requirement already satisfied, skipping upgrade: rsa<4.1,>=3.1.4 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (4.0)\n",
+ "Requirement already satisfied, skipping upgrade: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.6/dist-packages (from google-auth<2,>=1.6.3->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (0.2.8)\n",
+ "Requirement already satisfied, skipping upgrade: idna<3,>=2.5 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (2.9)\n",
+ "Requirement already satisfied, skipping upgrade: chardet<4,>=3.0.2 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (3.0.4)\n",
+ "Requirement already satisfied, skipping upgrade: certifi>=2017.4.17 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (2020.4.5.1)\n",
+ "Requirement already satisfied, skipping upgrade: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.6/dist-packages (from requests<3,>=2.21.0->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (1.24.3)\n",
+ "Requirement already satisfied, skipping upgrade: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.6/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (1.3.0)\n",
+ "Requirement already satisfied, skipping upgrade: importlib-metadata; python_version < \"3.8\" in /usr/local/lib/python3.6/dist-packages (from markdown>=2.6.8->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (1.6.0)\n",
+ "Requirement already satisfied, skipping upgrade: pyasn1>=0.1.3 in /usr/local/lib/python3.6/dist-packages (from rsa<4.1,>=3.1.4->google-auth<2,>=1.6.3->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (0.4.8)\n",
+ "Requirement already satisfied, skipping upgrade: oauthlib>=3.0.0 in /usr/local/lib/python3.6/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (3.1.0)\n",
+ "Requirement already satisfied, skipping upgrade: zipp>=0.5 in /usr/local/lib/python3.6/dist-packages (from importlib-metadata; python_version < \"3.8\"->markdown>=2.6.8->tensorboard<2.3.0,>=2.2.0->tensorflow==2.*) (3.1.0)\n",
+ "Collecting tf_slim\n",
+ "\u001b[?25l Downloading https://files.pythonhosted.org/packages/02/97/b0f4a64df018ca018cc035d44f2ef08f91e2e8aa67271f6f19633a015ff7/tf_slim-1.1.0-py2.py3-none-any.whl (352kB)\n",
+ "\u001b[K |████████████████████████████████| 358kB 2.8MB/s \n",
+ "\u001b[?25hRequirement already satisfied: absl-py>=0.2.2 in /usr/local/lib/python3.6/dist-packages (from tf_slim) (0.9.0)\n",
+ "Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from absl-py>=0.2.2->tf_slim) (1.12.0)\n",
+ "Installing collected packages: tf-slim\n",
+ "Successfully installed tf-slim-1.1.0\n"
+ ],
+ "name": "stdout"
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "zpKF8a2x4tec",
+ "colab_type": "text"
+ },
+ "source": [
+ "Make sure you have `pycocotools` installed"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "owcrp0AW4uCg",
+ "colab_type": "code",
+ "outputId": "001148a8-b0a8-43a1-f6df-225d86d90b8f",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 34
+ }
+ },
+ "source": [
+ "!pip install pycocotools"
+ ],
+ "execution_count": 2,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "Requirement already satisfied: pycocotools in /usr/local/lib/python3.6/dist-packages (2.0.0)\n"
+ ],
+ "name": "stdout"
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "wHFSRVaO4wuq",
+ "colab_type": "text"
+ },
+ "source": [
+ "Get `tensorflow/models` or `cd` to parent directory of the repository."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "E0ZuGKoi4wTn",
+ "colab_type": "code",
+ "outputId": "2b5d93cb-3548-4347-9b76-ce12bea44a56",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 136
+ }
+ },
+ "source": [
+ "import os\n",
+ "import pathlib\n",
+ "\n",
+ "\n",
+ "if \"models\" in pathlib.Path.cwd().parts:\n",
+ " while \"models\" in pathlib.Path.cwd().parts:\n",
+ " os.chdir('..')\n",
+ "elif not pathlib.Path('models').exists():\n",
+ " !git clone --depth 1 https://github.com/tensorflow/models"
+ ],
+ "execution_count": 3,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "Cloning into 'models'...\n",
+ "remote: Enumerating objects: 2694, done.\u001b[K\n",
+ "remote: Counting objects: 100% (2694/2694), done.\u001b[K\n",
+ "remote: Compressing objects: 100% (2370/2370), done.\u001b[K\n",
+ "remote: Total 2694 (delta 520), reused 1332 (delta 290), pack-reused 0\u001b[K\n",
+ "Receiving objects: 100% (2694/2694), 34.10 MiB | 29.32 MiB/s, done.\n",
+ "Resolving deltas: 100% (520/520), done.\n"
+ ],
+ "name": "stdout"
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "GkqRm-WY47MR",
+ "colab_type": "text"
+ },
+ "source": [
+ "Compile protobufs and install the object_detection package"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "62Dn1_YU45O2",
+ "colab_type": "code",
+ "outputId": "439166dd-6202-4ff9-897d-100a35ae5af5",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 54
+ }
+ },
+ "source": [
+ "%%bash\n",
+ "cd models/research/\n",
+ "protoc object_detection/protos/*.proto --python_out=."
+ ],
+ "execution_count": 4,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "object_detection/protos/input_reader.proto: warning: Import object_detection/protos/image_resizer.proto but not used.\n"
+ ],
+ "name": "stderr"
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "83kNiD-24-ZB",
+ "colab_type": "code",
+ "outputId": "aa148939-7dcc-4fbd-ea48-41236523712c",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 343
+ }
+ },
+ "source": [
+ "%%bash \n",
+ "cd models/research\n",
+ "pip install ."
+ ],
+ "execution_count": 5,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "Processing /content/models/research\n",
+ "Requirement already satisfied: Pillow>=1.0 in /usr/local/lib/python3.6/dist-packages (from object-detection==0.1) (7.0.0)\n",
+ "Requirement already satisfied: Matplotlib>=2.1 in /usr/local/lib/python3.6/dist-packages (from object-detection==0.1) (3.2.1)\n",
+ "Requirement already satisfied: Cython>=0.28.1 in /usr/local/lib/python3.6/dist-packages (from object-detection==0.1) (0.29.19)\n",
+ "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.6/dist-packages (from Matplotlib>=2.1->object-detection==0.1) (0.10.0)\n",
+ "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.6/dist-packages (from Matplotlib>=2.1->object-detection==0.1) (2.4.7)\n",
+ "Requirement already satisfied: numpy>=1.11 in /usr/local/lib/python3.6/dist-packages (from Matplotlib>=2.1->object-detection==0.1) (1.18.5)\n",
+ "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.6/dist-packages (from Matplotlib>=2.1->object-detection==0.1) (2.8.1)\n",
+ "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.6/dist-packages (from Matplotlib>=2.1->object-detection==0.1) (1.2.0)\n",
+ "Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from cycler>=0.10->Matplotlib>=2.1->object-detection==0.1) (1.12.0)\n",
+ "Building wheels for collected packages: object-detection\n",
+ " Building wheel for object-detection (setup.py): started\n",
+ " Building wheel for object-detection (setup.py): finished with status 'done'\n",
+ " Created wheel for object-detection: filename=object_detection-0.1-cp36-none-any.whl size=1141324 sha256=1dff68de415a4ccc3af0e20b8f409a73d147d79720a713dcdc30f9bc8d4ab3a2\n",
+ " Stored in directory: /tmp/pip-ephem-wheel-cache-rlyj8yrw/wheels/94/49/4b/39b051683087a22ef7e80ec52152a27249d1a644ccf4e442ea\n",
+ "Successfully built object-detection\n",
+ "Installing collected packages: object-detection\n",
+ "Successfully installed object-detection-0.1\n"
+ ],
+ "name": "stdout"
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "colab_type": "text",
+ "id": "LBdjK2G5ywuc"
+ },
+ "source": [
+ "### Imports"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab_type": "code",
+ "id": "hV4P5gyTWKMI",
+ "colab": {}
+ },
+ "source": [
+ "import numpy as np\n",
+ "import os\n",
+ "import six\n",
+ "import six.moves.urllib as urllib\n",
+ "import sys\n",
+ "import tarfile\n",
+ "import tensorflow as tf\n",
+ "import zipfile\n",
+ "import pathlib\n",
+ "import json\n",
+ "import datetime\n",
+ "import matplotlib.pyplot as plt\n",
+ "\n",
+ "from collections import defaultdict\n",
+ "from io import StringIO\n",
+ "from matplotlib import pyplot as plt\n",
+ "from PIL import Image\n",
+ "from IPython.display import display"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "colab_type": "text",
+ "id": "r5FNuiRPWKMN"
+ },
+ "source": [
+ "Import the object detection module."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab_type": "code",
+ "id": "4-IMl4b6BdGO",
+ "colab": {}
+ },
+ "source": [
+ "from object_detection.utils import ops as utils_ops\n",
+ "from object_detection.utils import label_map_util\n",
+ "from object_detection.utils import visualization_utils as vis_utils"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "colab_type": "text",
+ "id": "RYPCiag2iz_q"
+ },
+ "source": [
+ "Patches:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab_type": "code",
+ "id": "mF-YlMl8c_bM",
+ "colab": {}
+ },
+ "source": [
+ "# patch tf1 into `utils.ops`\n",
+ "utils_ops.tf = tf.compat.v1\n",
+ "\n",
+ "# Patch the location of gfile\n",
+ "tf.gfile = tf.io.gfile"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "colab_type": "text",
+ "id": "cfn_tRFOWKMO"
+ },
+ "source": [
+ "# Model preparation "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "colab_type": "text",
+ "id": "7ai8pLZZWKMS"
+ },
+ "source": [
+ "## Loader"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab_type": "code",
+ "id": "zm8xp-0eoItE",
+ "colab": {}
+ },
+ "source": [
+ "def load_model(model_name):\n",
+ " base_url = 'http://download.tensorflow.org/models/object_detection/'\n",
+ " model_file = model_name + '.tar.gz'\n",
+ " model_dir = tf.keras.utils.get_file(\n",
+ " fname=model_name,\n",
+ " origin=base_url + model_file,\n",
+ " untar=True)\n",
+ "\n",
+ " model_dir = pathlib.Path(model_dir)/\"saved_model\"\n",
+ " model = tf.saved_model.load(str(model_dir))\n",
+ " model = model.signatures['serving_default']\n",
+ "\n",
+ " return model"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "colab_type": "text",
+ "id": "_1MVVTcLWKMW"
+ },
+ "source": [
+ "## Loading label map\n",
+ "Label maps map indices to category names, so that when our convolution network predicts `5`, we know that this corresponds to `zebra`. Here we use internal utility functions, but anything that returns a dictionary mapping integers to appropriate string labels would be fine"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab_type": "code",
+ "id": "hDbpHkiWWKMX",
+ "colab": {}
+ },
+ "source": [
+ "# List of the strings that is used to add correct label for each box.\n",
+ "PATH_TO_LABELS = 'models/research/object_detection/data/snapshot_serengeti_label_map.pbtxt'\n",
+ "category_index = label_map_util.create_category_index_from_labelmap(PATH_TO_LABELS, use_display_name=False)"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "colab_type": "text",
+ "id": "oVU3U_J6IJVb"
+ },
+ "source": [
+ "We will test on a context group of images from one month at one camera from the Snapshot Serengeti val split defined on [LILA.science](http://lila.science/datasets/snapshot-serengeti), which was not seen during model training:\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab_type": "code",
+ "id": "jG-zn5ykWKMd",
+ "outputId": "c7bbbb2f-0f6e-4380-fd92-c88c088bd766",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 85
+ }
+ },
+ "source": [
+ "# If you want to test the code with your images, just add path to the images to\n",
+ "# the TEST_IMAGE_PATHS.\n",
+ "PATH_TO_TEST_IMAGES_DIR = pathlib.Path('models/research/object_detection/test_images/snapshot_serengeti')\n",
+ "TEST_IMAGE_PATHS = sorted(list(PATH_TO_TEST_IMAGES_DIR.glob(\"*.jpeg\")))\n",
+ "TEST_IMAGE_PATHS"
+ ],
+ "execution_count": 11,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "[PosixPath('models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0038.jpeg'),\n",
+ " PosixPath('models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0039.jpeg'),\n",
+ " PosixPath('models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0040.jpeg'),\n",
+ " PosixPath('models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0041.jpeg')]"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 11
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "oBcQzptnQ-x6",
+ "colab_type": "text"
+ },
+ "source": [
+ "Load the metadata for each image"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "ZLLINOHcQ-An",
+ "colab_type": "code",
+ "colab": {}
+ },
+ "source": [
+ "test_data_json = 'models/research/object_detection/test_images/snapshot_serengeti/context_rcnn_demo_metadata.json'\n",
+ "with open(test_data_json, 'r') as f:\n",
+ " test_metadata = json.load(f)"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "BgGTPHhkOAel",
+ "colab_type": "code",
+ "outputId": "1421a32a-c208-498f-931f-1bfeb25d6488",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 105
+ }
+ },
+ "source": [
+ "image_id_to_datetime = {im['id']:im['date_captured'] for im in test_metadata['images']}\n",
+ "image_path_to_id = {im['file_name']: im['id'] \n",
+ " for im in test_metadata['images']}\n",
+ "image_path_to_id"
+ ],
+ "execution_count": 13,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "{'models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0038.jpeg': 'S1/E03/E03_R3/S1_E03_R3_PICT0038',\n",
+ " 'models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0039.jpeg': 'S1/E03/E03_R3/S1_E03_R3_PICT0039',\n",
+ " 'models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0040.jpeg': 'S1/E03/E03_R3/S1_E03_R3_PICT0040',\n",
+ " 'models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0041.jpeg': 'S1/E03/E03_R3/S1_E03_R3_PICT0041'}"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 13
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "colab_type": "text",
+ "id": "H0_1AGhrWKMc"
+ },
+ "source": [
+ "# Generate Context Features for each image"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "kt3_pPQOj7ii",
+ "colab_type": "code",
+ "outputId": "fc72e978-f576-43f4-bcf1-3eb49fef5726",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 88
+ }
+ },
+ "source": [
+ "faster_rcnn_model_name = 'faster_rcnn_resnet101_snapshot_serengeti_2020_06_10'\n",
+ "faster_rcnn_model = load_model(faster_rcnn_model_name)"
+ ],
+ "execution_count": 14,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "Downloading data from http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_snapshot_serengeti_2020_06_10.tar.gz\n",
+ "588832768/588829839 [==============================] - 3s 0us/step\n",
+ "INFO:tensorflow:Saver not created because there are no variables in the graph to restore\n"
+ ],
+ "name": "stdout"
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "k6Clkv_mBo_U",
+ "colab_type": "text"
+ },
+ "source": [
+ "Check the model's input signature, it expects a batch of 3-color images of type uint8."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "H1qNlFESBsTR",
+ "colab_type": "code",
+ "outputId": "9b8b84e0-d7a8-4ec9-d6e0-22d574cb6209",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 34
+ }
+ },
+ "source": [
+ "faster_rcnn_model.inputs"
+ ],
+ "execution_count": 15,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "[]"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 15
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "eYS8KpRCBtBH",
+ "colab_type": "text"
+ },
+ "source": [
+ "And it returns several outputs. Note this model has been exported with additional output 'detection_features' which will be used to build the contextual memory bank."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "5M-1yxgfkmQl",
+ "colab_type": "code",
+ "outputId": "1da98c3b-79c5-4d19-d64c-3e9dbadc97c0",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 153
+ }
+ },
+ "source": [
+ "faster_rcnn_model.output_dtypes"
+ ],
+ "execution_count": 16,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "{'detection_boxes': tf.float32,\n",
+ " 'detection_classes': tf.float32,\n",
+ " 'detection_features': tf.float32,\n",
+ " 'detection_multiclass_scores': tf.float32,\n",
+ " 'detection_scores': tf.float32,\n",
+ " 'num_detections': tf.float32,\n",
+ " 'raw_detection_boxes': tf.float32,\n",
+ " 'raw_detection_scores': tf.float32}"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 16
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "zVjNFFNIDCst",
+ "colab_type": "code",
+ "outputId": "edb46db0-05fb-4952-bc88-db09d7811b01",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 153
+ }
+ },
+ "source": [
+ "faster_rcnn_model.output_shapes"
+ ],
+ "execution_count": 17,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "{'detection_boxes': TensorShape([None, 300, 4]),\n",
+ " 'detection_classes': TensorShape([None, 300]),\n",
+ " 'detection_features': TensorShape([None, None, None, None, None]),\n",
+ " 'detection_multiclass_scores': TensorShape([None, 300, 49]),\n",
+ " 'detection_scores': TensorShape([None, 300]),\n",
+ " 'num_detections': TensorShape([None]),\n",
+ " 'raw_detection_boxes': TensorShape([None, 300, 4]),\n",
+ " 'raw_detection_scores': TensorShape([None, 300, 49])}"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 17
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "colab_type": "text",
+ "id": "JP5qZ7sXJpwG"
+ },
+ "source": [
+ "Add a wrapper function to call the model, and cleanup the outputs:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab_type": "code",
+ "id": "ajmR_exWyN76",
+ "colab": {}
+ },
+ "source": [
+ "def run_inference_for_single_image(model, image):\n",
+ " '''Run single image through tensorflow object detection saved_model.\n",
+ "\n",
+ " This function runs a saved_model on a (single) provided image and returns\n",
+ " inference results in numpy arrays.\n",
+ "\n",
+ " Args:\n",
+ " model: tensorflow saved_model. This model can be obtained using \n",
+ " export_inference_graph.py.\n",
+ " image: uint8 numpy array with shape (img_height, img_width, 3)\n",
+ "\n",
+ " Returns:\n",
+ " output_dict: a dictionary holding the following entries:\n",
+ " `num_detections`: an integer\n",
+ " `detection_boxes`: a numpy (float32) array of shape [N, 4]\n",
+ " `detection_classes`: a numpy (uint8) array of shape [N]\n",
+ " `detection_scores`: a numpy (float32) array of shape [N]\n",
+ " `detection_features`: a numpy (float32) array of shape [N, 7, 7, 2048]\n",
+ " '''\n",
+ " image = np.asarray(image)\n",
+ " # The input needs to be a tensor, convert it using `tf.convert_to_tensor`.\n",
+ " input_tensor = tf.convert_to_tensor(image)\n",
+ " # The model expects a batch of images, so add an axis with `tf.newaxis`.\n",
+ " input_tensor = input_tensor[tf.newaxis,...]\n",
+ "\n",
+ " # Run inference\n",
+ " output_dict = model(input_tensor)\n",
+ " # All outputs are batches tensors.\n",
+ " # Convert to numpy arrays, and take index [0] to remove the batch dimension.\n",
+ " # We're only interested in the first num_detections.\n",
+ " num_dets = output_dict.pop('num_detections')\n",
+ " num_detections = int(num_dets)\n",
+ " for key,value in output_dict.items():\n",
+ " output_dict[key] = value[0, :num_detections].numpy() \n",
+ " output_dict['num_detections'] = num_detections\n",
+ "\n",
+ " # detection_classes should be ints.\n",
+ " output_dict['detection_classes'] = output_dict['detection_classes'].astype(\n",
+ " np.int64)\n",
+ " return output_dict"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
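+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "As an optional, purely illustrative sanity check (the all-zeros image below is a made-up input, not part of the dataset), the wrapper can be exercised on a dummy uint8 array to confirm the output keys and the `detection_features` shape described in the docstring:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {},
+ "source": [
+ "# Illustrative only: run the wrapper on a blank uint8 image and inspect the outputs.\n",
+ "dummy_image = np.zeros((480, 640, 3), dtype=np.uint8)\n",
+ "dummy_output = run_inference_for_single_image(faster_rcnn_model, dummy_image)\n",
+ "print(sorted(dummy_output.keys()))\n",
+ "# Per the docstring above, detection_features should have shape [N, 7, 7, 2048].\n",
+ "print(dummy_output['detection_features'].shape)"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },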
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "un5SXxIxMaaV",
+ "colab_type": "text"
+ },
+ "source": [
+ "Functions for embedding context features"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "qvtvAZFDMoTM",
+ "colab_type": "code",
+ "colab": {}
+ },
+ "source": [
+ "def embed_date_captured(date_captured):\n",
+ " \"\"\"Encodes the datetime of the image.\n",
+ "\n",
+ " Takes a datetime object and encodes it into a normalized embedding of shape \n",
+ " [5], using hard-coded normalization factors for year, month, day, hour,\n",
+ " minute.\n",
+ "\n",
+ " Args:\n",
+ " date_captured: A datetime object.\n",
+ "\n",
+ " Returns:\n",
+ " A numpy float32 embedding of shape [5].\n",
+ " \"\"\"\n",
+ " embedded_date_captured = []\n",
+ " month_max = 12.0\n",
+ " day_max = 31.0\n",
+ " hour_max = 24.0\n",
+ " minute_max = 60.0\n",
+ " min_year = 1990.0\n",
+ " max_year = 2030.0\n",
+ "\n",
+ " year = (date_captured.year-min_year)/float(max_year-min_year)\n",
+ " embedded_date_captured.append(year)\n",
+ "\n",
+ " month = (date_captured.month-1)/month_max\n",
+ " embedded_date_captured.append(month)\n",
+ "\n",
+ " day = (date_captured.day-1)/day_max\n",
+ " embedded_date_captured.append(day)\n",
+ "\n",
+ " hour = date_captured.hour/hour_max\n",
+ " embedded_date_captured.append(hour)\n",
+ "\n",
+ " minute = date_captured.minute/minute_max\n",
+ " embedded_date_captured.append(minute)\n",
+ "\n",
+ " return np.asarray(embedded_date_captured)"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
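+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "For intuition, here is a minimal worked example of `embed_date_captured` (the timestamp is arbitrary; the expected values in the comments are just the hand-computed normalizations from the function above):"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {},
+ "source": [
+ "example_dt = datetime.datetime(2012, 3, 15, 8, 30)\n",
+ "embed_date_captured(example_dt)\n",
+ "# Expected, following the normalizations above:\n",
+ "#   year   = (2012 - 1990) / 40 = 0.55\n",
+ "#   month  = (3 - 1) / 12  ~= 0.167\n",
+ "#   day    = (15 - 1) / 31 ~= 0.452\n",
+ "#   hour   = 8 / 24        ~= 0.333\n",
+ "#   minute = 30 / 60        = 0.5"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },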
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "xN8k5daOOA7b",
+ "colab_type": "code",
+ "colab": {}
+ },
+ "source": [
+ "def embed_position_and_size(box):\n",
+ " \"\"\"Encodes the bounding box of the object of interest.\n",
+ "\n",
+ " Takes a bounding box and encodes it into a normalized embedding of shape \n",
+ " [4] - the center point (x,y) and width and height of the box.\n",
+ "\n",
+ " Args:\n",
+ " box: A bounding box, formatted as [ymin, xmin, ymax, xmax].\n",
+ "\n",
+ " Returns:\n",
+ " A numpy float32 embedding of shape [4].\n",
+ " \"\"\"\n",
+ " ymin = box[0]\n",
+ " xmin = box[1]\n",
+ " ymax = box[2]\n",
+ " xmax = box[3]\n",
+ " w = xmax - xmin\n",
+ " h = ymax - ymin\n",
+ " x = xmin + w / 2.0\n",
+ " y = ymin + h / 2.0\n",
+ " return np.asarray([x, y, w, h])"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
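+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Similarly, a small worked example of `embed_position_and_size` on an arbitrary normalized box:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {},
+ "source": [
+ "example_box = [0.2, 0.1, 0.6, 0.5]  # [ymin, xmin, ymax, xmax], arbitrary values.\n",
+ "embed_position_and_size(example_box)\n",
+ "# Expected: [x, y, w, h] = [0.3, 0.4, 0.4, 0.4]"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },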
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "lJe2qy8HPc6Z",
+ "colab_type": "code",
+ "colab": {}
+ },
+ "source": [
+ "def get_context_feature_embedding(date_captured, detection_boxes,\n",
+ " detection_features, detection_scores):\n",
+ " \"\"\"Extracts representative feature embedding for a given input image.\n",
+ "\n",
+ " Takes outputs of a detection model and focuses on the highest-confidence\n",
+ " detected object. Starts with detection_features and uses average pooling to\n",
+ " remove the spatial dimensions, then appends an embedding of the box position\n",
+ " and size, and an embedding of the date and time the image was captured,\n",
+ " returning a one-dimensional representation of the object.\n",
+ "\n",
+ " Args:\n",
+ " date_captured: A datetime string of format '%Y-%m-%d %H:%M:%S'.\n",
+ " detection_features: A numpy (float32) array of shape [N, 7, 7, 2048].\n",
+ " detection_boxes: A numpy (float32) array of shape [N, 4].\n",
+ " detection_scores: A numpy (float32) array of shape [N].\n",
+ "\n",
+ " Returns:\n",
+ " A numpy float32 embedding of shape [2057].\n",
+ " \"\"\"\n",
+ " date_captured = datetime.datetime.strptime(date_captured,'%Y-%m-%d %H:%M:%S')\n",
+ " temporal_embedding = embed_date_captured(date_captured)\n",
+ " embedding = detection_features[0]\n",
+ " pooled_embedding = np.mean(np.mean(embedding, axis=1), axis=0)\n",
+ " box = detection_boxes[0]\n",
+ " position_embedding = embed_position_and_size(box)\n",
+ " bb_embedding = np.concatenate((pooled_embedding, position_embedding))\n",
+ " embedding = np.expand_dims(np.concatenate((bb_embedding,temporal_embedding)),\n",
+ " axis=0)\n",
+ " score = detection_scores[0]\n",
+ " return embedding, score"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
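+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The embedding length of 2057 comes from concatenating the 2048-dimensional pooled detection feature, the 4-dimensional box embedding and the 5-dimensional date embedding. A small shape check (the zero arrays below are placeholders standing in for real detector outputs) makes this explicit:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {},
+ "source": [
+ "# Shape check only; the zero arrays stand in for real detection outputs.\n",
+ "fake_features = np.zeros((1, 7, 7, 2048), dtype=np.float32)\n",
+ "fake_boxes = np.zeros((1, 4), dtype=np.float32)\n",
+ "fake_scores = np.zeros((1,), dtype=np.float32)\n",
+ "emb, _ = get_context_feature_embedding(\n",
+ "    '2012-03-15 08:30:00', fake_boxes, fake_features, fake_scores)\n",
+ "emb.shape  # Expected: (1, 2057) = 2048 pooled + 4 box + 5 temporal dims."
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },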
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "colab_type": "text",
+ "id": "z1wq0LVyMRR_"
+ },
+ "source": [
+ "Run it on each test image and use the output detection features and metadata to build up a context feature bank:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab_type": "code",
+ "id": "DWh_1zz6aqxs",
+ "colab": {}
+ },
+ "source": [
+ "def run_inference(model, image_path, date_captured, resize_image=True):\n",
+ " \"\"\"Runs inference over a single input image and extracts contextual features.\n",
+ "\n",
+ " Args:\n",
+ " model: A tensorflow saved_model object.\n",
+ " image_path: Absolute path to the input image.\n",
+ " date_captured: A datetime string of format '%Y-%m-%d %H:%M:%S'.\n",
+ " resize_image: Whether to resize the input image before running inference.\n",
+ "\n",
+ " Returns:\n",
+ " context_feature: A numpy float32 array of shape [2057].\n",
+ " score: A numpy float32 object score for the embedded object.\n",
+ " output_dict: The saved_model output dictionary for the image.\n",
+ " \"\"\"\n",
+ " with open(image_path,'rb') as f:\n",
+ " image = Image.open(f)\n",
+ " if resize_image:\n",
+ " image.thumbnail((640,640),Image.ANTIALIAS)\n",
+ " image_np = np.array(image)\n",
+ "\n",
+ " # Actual detection.\n",
+ " output_dict = run_inference_for_single_image(model, image_np)\n",
+ "\n",
+ " context_feature, score = get_context_feature_embedding(\n",
+ " date_captured, output_dict['detection_boxes'],\n",
+ " output_dict['detection_features'], output_dict['detection_scores'])\n",
+ " return context_feature, score, output_dict"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab_type": "code",
+ "id": "3a5wMHN8WKMh",
+ "colab": {}
+ },
+ "source": [
+ "context_features = []\n",
+ "scores = []\n",
+ "faster_rcnn_results = {}\n",
+ "for image_path in TEST_IMAGE_PATHS:\n",
+ " image_id = image_path_to_id[str(image_path)]\n",
+ " date_captured = image_id_to_datetime[image_id]\n",
+ " context_feature, score, results = run_inference(\n",
+ " faster_rcnn_model, image_path, date_captured)\n",
+ " faster_rcnn_results[image_id] = results\n",
+ " context_features.append(context_feature)\n",
+ " scores.append(score)\n",
+ "\n",
+ "# Concatenate all extracted context embeddings into a contextual memory bank.\n",
+ "context_features_matrix = np.concatenate(context_features, axis=0)\n"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
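+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Each `context_feature` has shape `[1, 2057]`, so the concatenated memory bank should have shape `[num_images, 2057]`; a quick check:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {},
+ "source": [
+ "context_features_matrix.shape  # Expected: (len(TEST_IMAGE_PATHS), 2057)"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },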
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "colab_type": "text",
+ "id": "DsspMPX3Cssg"
+ },
+ "source": [
+ "## Run Detection With Context"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "colab_type": "text",
+ "id": "f7aOtOlebK7h"
+ },
+ "source": [
+ "Load a context r-cnn object detection model:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab_type": "code",
+ "id": "1XNT0wxybKR6",
+ "outputId": "cc5b0677-cf16-46c2-9ae5-32681725f856",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 88
+ }
+ },
+ "source": [
+ "context_rcnn_model_name = 'context_rcnn_resnet101_snapshot_serengeti_2020_06_10'\n",
+ "context_rcnn_model = load_model(context_rcnn_model_name)\n"
+ ],
+ "execution_count": 24,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "Downloading data from http://download.tensorflow.org/models/object_detection/context_rcnn_resnet101_snapshot_serengeti_2020_06_10.tar.gz\n",
+ "724664320/724658931 [==============================] - 3s 0us/step\n",
+ "INFO:tensorflow:Saver not created because there are no variables in the graph to restore\n"
+ ],
+ "name": "stdout"
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "G6IGGtGqBH6y",
+ "colab_type": "text"
+ },
+ "source": [
+ "We need to define the expected context padding size for the\n",
+ "model, this must match the definition in the model config (max_num_context_features)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "4oh9XNLBjkTL",
+ "colab_type": "code",
+ "colab": {}
+ },
+ "source": [
+ "context_padding_size = 2000"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
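+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "To see how the memory bank will be presented to the model, here is a minimal padding sketch (the inference wrapper defined below performs the same steps): the `[num_images, 2057]` bank is zero-padded up to `context_padding_size` rows and given a batch dimension, while `valid_context_size` records how many rows are real."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {},
+ "source": [
+ "# Illustrative padding sketch; the inference wrapper below does the same thing.\n",
+ "valid_context_size = context_features_matrix.shape[0]\n",
+ "padded_bank = np.pad(\n",
+ "    context_features_matrix,\n",
+ "    ((0, context_padding_size - valid_context_size), (0, 0)), mode='constant')\n",
+ "padded_bank[np.newaxis, ...].shape  # Expected: (1, 2000, 2057)"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },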
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "colab_type": "text",
+ "id": "yN1AYfAEJIGp"
+ },
+ "source": [
+ "Check the model's input signature, it expects a batch of 3-color images of type uint8, plus context_features padded to the maximum context feature size for this model (2000) and valid_context_size to represent the non-padded context features: "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab_type": "code",
+ "id": "CK4cnry6wsHY",
+ "outputId": "d77af014-769f-4e20-b4ac-bfdd40502128",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 68
+ }
+ },
+ "source": [
+ "context_rcnn_model.inputs"
+ ],
+ "execution_count": 26,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "[,\n",
+ " ,\n",
+ " ]"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 26
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "colab_type": "text",
+ "id": "Q8u3BjpMJXZF"
+ },
+ "source": [
+ "And returns several outputs:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab_type": "code",
+ "id": "oLSZpfaYwuSk",
+ "outputId": "63a3903f-529b-41f9-b742-9b81c4c5e096",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 136
+ }
+ },
+ "source": [
+ "context_rcnn_model.output_dtypes"
+ ],
+ "execution_count": 27,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "{'detection_boxes': tf.float32,\n",
+ " 'detection_classes': tf.float32,\n",
+ " 'detection_multiclass_scores': tf.float32,\n",
+ " 'detection_scores': tf.float32,\n",
+ " 'num_detections': tf.float32,\n",
+ " 'raw_detection_boxes': tf.float32,\n",
+ " 'raw_detection_scores': tf.float32}"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 27
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab_type": "code",
+ "id": "FZyKUJeuxvpT",
+ "outputId": "d2feeaba-2bb2-4779-a96a-94a8a0aff362",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 136
+ }
+ },
+ "source": [
+ "context_rcnn_model.output_shapes"
+ ],
+ "execution_count": 28,
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "{'detection_boxes': TensorShape([1, 300, 4]),\n",
+ " 'detection_classes': TensorShape([1, 300]),\n",
+ " 'detection_multiclass_scores': TensorShape([1, 300, 49]),\n",
+ " 'detection_scores': TensorShape([1, 300]),\n",
+ " 'num_detections': TensorShape([1]),\n",
+ " 'raw_detection_boxes': TensorShape([1, 300, 4]),\n",
+ " 'raw_detection_scores': TensorShape([1, 300, 49])}"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ },
+ "execution_count": 28
+ }
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab_type": "code",
+ "id": "CzkVv_n2MxKC",
+ "colab": {}
+ },
+ "source": [
+ "def run_context_rcnn_inference_for_single_image(\n",
+ " model, image, context_features, context_padding_size):\n",
+ " '''Run single image through a Context R-CNN saved_model.\n",
+ "\n",
+ " This function runs a saved_model on a (single) provided image and provided \n",
+ " contextual features and returns inference results in numpy arrays.\n",
+ "\n",
+ " Args:\n",
+ " model: tensorflow Context R-CNN saved_model. This model can be obtained\n",
+ " using export_inference_graph.py and setting side_input fields. \n",
+ " Example export call - \n",
+ " python export_inference_graph.py \\\n",
+ " --input_type image_tensor \\\n",
+ " --pipeline_config_path /path/to/context_rcnn_model.config \\\n",
+ " --trained_checkpoint_prefix /path/to/context_rcnn_model.ckpt \\\n",
+ " --output_directory /path/to/output_dir \\\n",
+ " --use_side_inputs True \\\n",
+ " --side_input_shapes 1,2000,2057/1 \\\n",
+ " --side_input_names context_features,valid_context_size \\\n",
+ " --side_input_types float,int \\\n",
+ " --input_shape 1,-1,-1,3\n",
+ "\n",
+ " image: uint8 numpy array with shape (img_height, img_width, 3)\n",
+ " context_features: A numpy float32 contextual memory bank of shape \n",
+ " [num_context_examples, 2057]\n",
+ " context_padding_size: The amount of expected padding in the contextual\n",
+ " memory bank, defined in the Context R-CNN config as \n",
+ " max_num_context_features.\n",
+ "\n",
+ " Returns:\n",
+ " output_dict: a dictionary holding the following entries:\n",
+ " `num_detections`: an integer\n",
+ " `detection_boxes`: a numpy (float32) array of shape [N, 4]\n",
+ " `detection_classes`: a numpy (uint8) array of shape [N]\n",
+ " `detection_scores`: a numpy (float32) array of shape [N]\n",
+ " '''\n",
+ " image = np.asarray(image)\n",
+ " # The input image needs to be a tensor, convert it using \n",
+ " # `tf.convert_to_tensor`.\n",
+ " image_tensor = tf.convert_to_tensor(\n",
+ " image, name='image_tensor')[tf.newaxis,...]\n",
+ "\n",
+ " context_features = np.asarray(context_features)\n",
+ " valid_context_size = context_features.shape[0]\n",
+ " valid_context_size_tensor = tf.convert_to_tensor(\n",
+ " valid_context_size, name='valid_context_size')[tf.newaxis,...]\n",
+ " padded_context_features = np.pad(\n",
+ " context_features,\n",
+ " ((0,context_padding_size-valid_context_size),(0,0)), mode='constant')\n",
+ " padded_context_features_tensor = tf.convert_to_tensor(\n",
+ " padded_context_features,\n",
+ " name='context_features',\n",
+ " dtype=tf.float32)[tf.newaxis,...]\n",
+ "\n",
+ " # Run inference\n",
+ " output_dict = model(\n",
+ " inputs=image_tensor,\n",
+ " context_features=padded_context_features_tensor,\n",
+ " valid_context_size=valid_context_size_tensor)\n",
+ " # All outputs are batches tensors.\n",
+ " # Convert to numpy arrays, and take index [0] to remove the batch dimension.\n",
+ " # We're only interested in the first num_detections.\n",
+ " num_dets = output_dict.pop('num_detections')\n",
+ " num_detections = int(num_dets)\n",
+ " for key,value in output_dict.items():\n",
+ " output_dict[key] = value[0, :num_detections].numpy() \n",
+ " output_dict['num_detections'] = num_detections\n",
+ "\n",
+ " # detection_classes should be ints.\n",
+ " output_dict['detection_classes'] = output_dict['detection_classes'].astype(np.int64)\n",
+ " return output_dict"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "0FqVkR3Agc6U",
+ "colab_type": "code",
+ "colab": {}
+ },
+ "source": [
+ "def show_context_rcnn_inference(\n",
+ " model, image_path, context_features, faster_rcnn_output_dict,\n",
+ " context_padding_size, resize_image=True):\n",
+ " \"\"\"Runs inference over a single input image and visualizes Faster R-CNN vs. \n",
+ " Context R-CNN results.\n",
+ "\n",
+ " Args:\n",
+ " model: A tensorflow saved_model object.\n",
+ " image_path: Absolute path to the input image.\n",
+ " context_features: A numpy float32 contextual memory bank of shape \n",
+ " [num_context_examples, 2057]\n",
+ " faster_rcnn_output_dict: The output_dict corresponding to this input image\n",
+ " from the single-frame Faster R-CNN model, which was previously used to\n",
+ " build the memory bank.\n",
+ " context_padding_size: The amount of expected padding in the contextual\n",
+ " memory bank, defined in the Context R-CNN config as \n",
+ " max_num_context_features.\n",
+ " resize_image: Whether to resize the input image before running inference.\n",
+ "\n",
+ " Returns:\n",
+ " context_rcnn_image_np: Numpy image array showing Context R-CNN Results.\n",
+ " faster_rcnn_image_np: Numpy image array showing Faster R-CNN Results.\n",
+ " \"\"\"\n",
+ "\n",
+ " # the array based representation of the image will be used later in order to prepare the\n",
+ " # result image with boxes and labels on it.\n",
+ " with open(image_path,'rb') as f:\n",
+ " image = Image.open(f)\n",
+ " if resize_image:\n",
+ " image.thumbnail((640,640),Image.ANTIALIAS)\n",
+ " image_np = np.array(image)\n",
+ " image.thumbnail((400,400),Image.ANTIALIAS)\n",
+ " context_rcnn_image_np = np.array(image)\n",
+ " \n",
+ " faster_rcnn_image_np = np.copy(context_rcnn_image_np)\n",
+ "\n",
+ " # Actual detection.\n",
+ " output_dict = run_context_rcnn_inference_for_single_image(\n",
+ " model, image_np, context_features, context_padding_size)\n",
+ "\n",
+ " # Visualization of the results of a context_rcnn detection.\n",
+ " vis_utils.visualize_boxes_and_labels_on_image_array(\n",
+ " context_rcnn_image_np,\n",
+ " output_dict['detection_boxes'],\n",
+ " output_dict['detection_classes'],\n",
+ " output_dict['detection_scores'],\n",
+ " category_index,\n",
+ " use_normalized_coordinates=True,\n",
+ " line_thickness=2)\n",
+ " \n",
+ " # Visualization of the results of a faster_rcnn detection.\n",
+ " vis_utils.visualize_boxes_and_labels_on_image_array(\n",
+ " faster_rcnn_image_np,\n",
+ " faster_rcnn_output_dict['detection_boxes'],\n",
+ " faster_rcnn_output_dict['detection_classes'],\n",
+ " faster_rcnn_output_dict['detection_scores'],\n",
+ " category_index,\n",
+ " use_normalized_coordinates=True,\n",
+ " line_thickness=2)\n",
+ " return context_rcnn_image_np, faster_rcnn_image_np"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "3cYa2B8uAYx0",
+ "colab_type": "text"
+ },
+ "source": [
+ "Define Matplotlib parameters for pretty visualizations"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "9F8okR1uAQ0T",
+ "colab_type": "code",
+ "colab": {}
+ },
+ "source": [
+ "%matplotlib inline\n",
+ "plt.rcParams['axes.grid'] = False\n",
+ "plt.rcParams['xtick.labelsize'] = False\n",
+ "plt.rcParams['ytick.labelsize'] = False\n",
+ "plt.rcParams['xtick.top'] = False\n",
+ "plt.rcParams['xtick.bottom'] = False\n",
+ "plt.rcParams['ytick.left'] = False\n",
+ "plt.rcParams['ytick.right'] = False\n",
+ "plt.rcParams['figure.figsize'] = [15,10]"
+ ],
+ "execution_count": 0,
+ "outputs": []
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "YGj7nXXQAaQ7",
+ "colab_type": "text"
+ },
+ "source": [
+ "Run Context R-CNN inference and compare results to Faster R-CNN"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "colab_type": "code",
+ "id": "vQ2Sj2VIOZLA",
+ "outputId": "1c043894-09e5-4c9f-a99d-ae21d6e72d0c",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 1000
+ }
+ },
+ "source": [
+ "for image_path in TEST_IMAGE_PATHS:\n",
+ " image_id = image_path_to_id[str(image_path)]\n",
+ " faster_rcnn_output_dict = faster_rcnn_results[image_id]\n",
+ " context_rcnn_image, faster_rcnn_image = show_context_rcnn_inference(\n",
+ " context_rcnn_model, image_path, context_features_matrix,\n",
+ " faster_rcnn_output_dict, context_padding_size)\n",
+ " plt.subplot(1,2,1)\n",
+ " plt.imshow(faster_rcnn_image)\n",
+ " plt.title('Faster R-CNN')\n",
+ " plt.subplot(1,2,2)\n",
+ " plt.imshow(context_rcnn_image)\n",
+ " plt.title('Context R-CNN')\n",
+ " plt.show()"
+ ],
+ "execution_count": 32,
+ "outputs": [
+ {
+ "output_type": "display_data",
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAA1wAAAFDCAYAAAAu+g+jAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOx9d5xsWVnt2tXVfe8dYJhAGEYFRAxk8JFBER9ZUBAFFRQkCKiAJAcQRMkgUQUlKAxZFCQHA2EMAyoITwUe8BjCEGeGGSbduV3Vtd8fp77qVavWPtV9b0+4t7/v9+tfV52zw5fXPt/Z51SptSIpKSkpKSkpKSkpKSlp52lwaTOQlJSUlJSUlJSUlJR0pFJecCUlJSUlJSUlJSUlJV1MlBdcSUlJSUlJSUlJSUlJFxPlBVdSUlJSUlJSUlJSUtLFRHnBlZSUlJSUlJSUlJSUdDFRXnAlJSUlJSUlJSUlJSVdTJQXXElJSUlJSUlJSUlJSRcT5QVX0mWaSilfLqXsL6WcT38nHuRYDyyl/PMO8/fAUsrGlK9zSymfLqXcfUmfo0spLymlfHXa7/9Nv19pev7LpZTvlFIuR30eUkr5CH2vpZT/KqUM6NgzSymv3Un5kpKSkpIuG1RK+ZVSyn9MceObpZT3l1JuuwPjvraU8swd4nHpWFP8umAqx9dLKS8qpaws6dOUvZTyB9Mx70Pth9Nj1yS+ainl5tTm2qWU/DHapEuE8oIr6XCge9RaL09/37g0mCilDBunTq21Xh7AMQBeDuAtpZRjGmOsAfhHANcDcBcARwO4FYCzANycmq4AePQSlk4E8EtbFiApKSkp6bCkUspjAbwEwLMBXBXA1dHhzc9dmnwdAt1oipu3A3BfAA9qNdyi7N8F8IdLLty+C2BHLiyTkrZLecGVdNhRKeXYUsp7SilnlFLOnn7+fjr/wFLKl0op55VSTiul3K+Uch0Afw7gVtMK2TnTtntKKS+Y3m36dinlz0sp+6bnfqqUcnop5aRSyrcAvKaPr1rrBMDrAVwOwA83mv0aOrC4V631M7XWSa31O7XWZ9Ra30ft/gjA41sXblN6PjqAaV0IJiUlJSUd5lRKuSKApwP4rVrr22utF9RaR7XWd9danzBts2e6U+Ib07+XlFL2TM8Flj1uunvim6WUX5+e+w0A9wPwu1NsfPf0+ImllLdNcfa0UsqjpsePm451j+n3y5dSvlhK+bXWWH1Ua/0igH8BcOODlX1KHwCwDuD+PdOdDOCGpZTbLeMrKWmnKS+4kg5HGqC7+LkGuouX/QD+FACm2/D+GMBda61XAHBrAJ+qtX4WwMMxvRtVa40LmecC+BF0yf7aAL4PwO/TXCcAOG4612/0MTWtrP06gBGArzSa3QHAB2qt5y+R8T8AfATA43vavB3AuQAeuGSspKSkpKTDl24FYC+Av+1p83sAbokOy26EbsfEU+j8CQCuiA7jHgzgZaWUY2utrwTwRgDPn2LjPaZb1d8N4NPT9v8bwO+UUu5ca/0uurtRryqlXAXAi9Fh7OvcWMsEK6X8GICfAPDFQ5AdACqApwJ4WilltdHmQnR3yZ61jK+kpJ2mvOBKOhzoHaWUc6Z/76i1nlVrfVut9cJa63nokidXrCYArl9K2Vdr/Wat9X/coKWUgu4i6jG11u9Ox3o25rfpTQA8rdZ6oNa6v8HfLad3zC4C8AIA96+1fqfR9ngA39yi3L8P4JGllCs3zgfAPHW6VTEpKSkp6cij4wGcWWsd97S5H4CnT3dMnAHgDwH8Kp0fTc+Pprspzgfwo42xbgbgyrXWp9da12utXwLwKkyxsdb6dwD+Gt32+LsBeNhByPTJUsoFAD6Lrrj48ka7rciOKV/vAnAGgIf0NHsFgKuXUu66PXaTkg6N8oIr6XCge9Zaj5n+3bOUclQp5RWllK+UUs4FcAqAY0opK7XWC9DtB384gG+WUt47raA5ujKAowB8Ii7o0G1L4AucM2qtFy3h72PTO2bHAngXumodSilXL/Syj2nbswBcbStC11r/G8B7ADyxp837AJyOgwO8pKSkpKTLPp0F4EpLto+fiPmdFV+ZHpuNIRctFwK4fGOsawA4kQqd5wB4Mrrnp4JeCeD6AF5baz1ri3Iw/fh0/vsCuAW6rfiYvgwjcPN+2JrsTE9Bd7dvrztZaz0A4BnTv6SkS4zygivpcKTHoavM3aLWejSAn5weLwBQa/1grfWO6C5sPoeuMgd0d4SYzkS3HfF6dEF3xemDvGj0adJ0m+AjAPxqKeUmtdav8ss+ps3+AcCdC72BcAk9DcBD0W3raNHvoQPDo7bKa1JSUlLSYUOnAjgA4J49bb6B7kIp6OrTY1shxbmvATiNcPGYWusVaq13A2bb518J4HUAfrOUcu2esdqTdvRWdPL9/vTYXQk334ityc5j/j267Ym/2dPsNehecvXzW+U1KelQKS+4kg5HugK6C6VzSinHobsoAQCUUq5aSvm56QXNAXTbJibT098G8P2x/W76kotXAXjxdC86SinfV0q588EyNt3f/mrMPwfG9Hp0YPa2UsqPlVIGpZTjSylPLqXczYz3RQB/BeBRPXN+BMB/A3jAwfKdlJSUlHTZpFrr99BhystKKbHLY7WUctdSyvOnzd4M4CmllCuX7idGfh/AG7Y4xbcBXIu+/xuA86YvjNpXSlkppVy/lHKz6fkno7uwehC6Fzy9rmy+HVDH2go9F8BDSykn6Iktyq70ewB+tzXZ9E7f0wCctE0+k5IOmvKCK+lwpJcA2IfuDtXH0G0DDBoAeCy6yt530T3b9YjpuQ8B+B8A3yqlnDk9dhK6atjHptsT/wHtfe3b4e9upZQb6onpdoY7oLvz9vfoXnrxbwCuBODjjfGejul2ix56CrqXeyQlJSUlHWFUa30hOmx7CrrnlL4G4LcBvGPa5JnoXrb0fwD8F4BPYuuvQP8LANel56Q3ANwd3Qs4TkOHta8GcMVSyv+a8vFr03bPQ3fx9UQ31hZl+y90jwY8oXF+meza/l/Q4WofvRlbf546KemQqdSav/mWlJSUlJSUlJSUlJR0cVDe4UpKSkpKSkpKSkpKSrqYKC+4kpKSkpKSkpKSkpKSLibKC66kpKSkpKSkpKSkpKSLifKCKykpKSkpKSkpKSkp6WKirf6QXJNKKbWUglorSimz4/oyjjgXx/U7H2sd176tuVrzbuWz9nVjL+PTUczTkmG7tNV5D3bslo63O5cbK+myQZeUbVpxtNO0XXmcT7fyVmuO7eQv7d/67HjT8fpylZ5fli+3koulzZm1Vv5x8KQeSoxMjNzuWEmXDUqMTIzsm6PFc63VKnlLF1yllFvWWj/WOj8cDjGZTDAYDGaTbmxszJhS4QaD+RtrtdZZfwCYTCZz5xms4nO0nQo36xM8xPdSCgaDwYICneNxHz7vjDEYDOb4jGNqUB0rPkd/5wA8jh5zjqDyLgM17ss6VLsMBgNsbGzM8eDax+doz2PH97CLk8fpjL/zXGp
LF2Tx54LK2U31GLIAi77IsvF5lodtwHExmUy2nDDi88rKyoJMPHdrodVaIAWvwXeMu7KyMsez0yn7h9OB8zv1Y+4f/DvdqG9xrMTxrSz23Lnop3OyzjSeVlZWZrHQ8sEW8Lg8oDy5/OQ+u3zq8ivHlfKsMab613iM/vF9NBp9pangXUjL8BFIjORjiZGJkYmRm5QYeWRh5Gg0auvWGWM7VEqp4Yg8eQjFjDqBgtEw1HA4nAOICKaNjY05h9fgUgWEA/YpKHgCsJDY4/vKyspCklWKthwAzujqdAp4PGcLRDTgGIhZT8yHI9XfysrKbP445gCPdajE8qvMeo4DOuyufZyeWTdBmqD1e8se4XetBUccH41GC0mHx+TEzOPy4or9noOTZQM27eD8nG3L7XVsBjM+x2OzbVU3LslqXCuIBsUcrh/zE+cjSTOwuHlbCxFuy+O7BWjEiYJf9OHco2NxPmnFo/ZRe7PdOM7dAqQVhzGW5je1dSsf8YJJ24aPqW8zb6UUrK+vf6LWelMkbYkSIxMjmR+Wkz8nRiZGMj9xPjHy8MLI8XiMyWRy8He4lpE6QCjAJXxWACso+sTxcLQYUxOkVn+ibyQHdTQ2Mjsa88PjRnKbTCZzytfxmB9VvDqRS8bMj7bVANIKRhDzosDnAE0raTwG08bGBtbW1jCZTDAej2e8Md8KChoU8T3s4vh01VwAs8XDeDye+YAmFk3oPKcmJ1eV0sUk8zcejxeC3iVrTa7MKwM9+4mzjy6EVCdhC+cfLYB1vs4A5IAn7OHiyOlVq4EKsOoPcZz9phVfSq6yvbq6OhtLie0VOUSrzDEO5w7VmQILJ12Wx/Gvvu4qvWFXznma/N34ChSsF81vzFMsdoIPjk/2Gc4VLR0nLafEyMTIxMjEyBgjMfLIxciWXYAdemmGOlfLGTgRMGP8P4zsmHcK43N6q77FI/MRx7mawkbQZMM8slytoNMr8WjPjh9Jk4OZK3MuiQHovX2rFSUmBXFeAKjt1tfXZ8lckzzLGlVOlll1Gvy4aqhL6qw71Y3Tu55rJRfWQyspq69qXwWp8CHuC3SJDti0VfAZ/VhX2l/nDr9jf2nFB/MW5zR5BS/BR3xX4GzZSIEi/nMMKx9MLimGfOxj2p4TXSR2XWgp/6w/9fcYR/MAt2H52I782fmS2jD8Mtop3yEL68QtONxig8dXXqONxpSTuTVmK3aTllNiZGJkYmRiZPxPjDxyMdK1C9qJl2bMCRlM8DllkJ2SlaKBoUHlgITn5CqJS6LaR8dmQzOg9M2vAKWVF55L+8exSNZMCgZ6nvlpASx/j+B0FcBW1Zp1wsGiulOnd8mOb6Oz7Go3tXvMzVWrGCuSQIzLFYiYQ2VjwNRA1QTLfDMItsBMfYZ9nPXnKp3OflxFU3/gai6DE9sm/FH9nRdDCqhcKYq2ansHErHVoBU/TlYF12i3vr6+UM1zxHPo8b524aOql5BHFzzRXvkO3jU/OHuobdgGMR/7nCZxB+z8X0FNFzhaRVbZuU/LTjxe0tYpMTIxMjFyfr7EyMRItceRgpF9tCPo6RK3Cs/CspJUsCBWhIKKJrAWLxxw0S6qKKpMNjYnAXYyTmh8hdy64m8RVwqcrvgcV3VUt8FH3NZ3c2qicFf93Fb7AlgIJNaLOnGcZz1zP+ZHt7ZsbGzMBRmPyQlNE5wGsepR28W8uujgwNQAU531LSj4M2/10CqRI9UnzxHfp3uE5/q1bt+34oV1wn2ULwUSJn4uBfAPBmtss6108RBjaIJUQGK7OZvzZ47Z0BP3c4u9vuPqM25x5o6r3mMOlqela80VPK4DmvCziFuWlxcXGpPRj3OO85Gk7VNiZGJkYmRiZGIkmsePFIzsox17houFHAwGC7f9HLngU6Wx4Cwgz8vt+bNTBM/HiUsdSfu54HcJXGXSczxfOBwbXfuoflhPrJPoFw9Y9oEsj+2AbFkfl4SZL7Y7g3HLId1igsklmGjPyUITgIIB92VAcgHJ/qeJ3QV0yB1zctDyosb5a3xnPhUsWA7ew+yATYFe9cvf4yHjvoWd6i744X6uLcdrS15XpebPaj/1WSc7n2d7cBtXmVf+9POyHBHHVVat5i+LN23Pfujm1sVPjKvbuhy1wMkBYQs4k5ZTYmRbJj3H8yVGzs+TGJkYmRh52cbIPjrkO1w6uUv0ShzsmvDVEdQJuD/PzQrmylqMyYrg5NACnJibx3SApoblylVfO9WTJrqYTx0rSKuSTrfOVnpe/6tOHcg7p2K52T46FydVtoPzlVayVR24INPEzDxrAKuu1c5qH9azG8+dc9UX53PODgxSGh99ccbVwr5FQh+AuKQZ7d0tf3dLfiu60/gE5p+DaOmupW+dTz/3bQ1woKt6dvmH27Nv69aLOKYx5e4g8HeurOn5Vvy4xYvTUeikBXy6aE3aOiVGJkaq3ImRiZGJkbsPIw/5gkuTbmtCDZL4HMzr7XYWximLg4znZ8M5fhhstK2TwQVCvJ1JgaZPB/HdJVMNvr5Ac07UAmP33fGm8ro+fX2VLz7W4jH+MyDw974xVTYFDrWd+lDcSla/0geduW9LV8pLjK8VTAf6bkzHtwN8YN6XXJxwZVOTV/zXxZHjh+XQeGU+mFxVTuPbyahjuLzB49Ra5xZfEZ8a25oDOA+0Fil9pAsMBRS3AFHe44/buqTNeub2DtyYn5ZeW4sEtoUCIp9P2h4lRiZGKl98LDEyMZJlSYw8cjFyR7YUxkScJFuCOmDgvpoU9areGZHn01vVOgff1nYB3JIrFNuqXHHSjGNuHG7fBwwKVK2xlNjRHEi5hO4SfrR3gLsM5FUXqkc+1wIcHov9I/rwmNFuWYVM5wM2fUoriZxo+HY786bjcl+XoNiu6t8sE/s5b7Vw9nNjqt1Zlwx0GgvOZjyXxpXaknl0sdpKRMti0Y2l211Yz1pBDD072WK+sK9b3Do+Wz7Y0pfypDHQiiPWAcuoumz5CcvXl3e5rS7wQt99QJK0nBIjEyPdeE6PiZGJkaq7xMjDHyN3ZEuhfnYCqRDxPwzd2hagc7lEoVfAmoD4FiPzyGPwZwccTk5N9GpM/nNz8ndNNsyTc1zVt/LgzrEOefxW4u/TFfOrC4UWoDl/YBtykLTG7ks8mljVH92boVQmt+VDx3J25B8mbOksApWf3XB+wvIo2PTJy+dVBp7TVfGY1Id1TJZF7aAJk22nCyzHs/MbllXHVOJkyOO6qpibx+m6lY/YJ91CRuVS3275amu+PmKbaa5TH2R+W2O4WNaxkrZGiZGJkYmRiZGJkbsDI/toR7YUBiPMlJ5zCZL7OAWwEdxvDrgg0OMuuFsJvhWIuq3BJRk1UHznqpA7Fsc1ADixsRz6P/roNgF2GievUkvvPKZuZXDAweS2pXBg8zhuYbAsMasu9HN814DlMeKYPsDO53grhyMF/JBPda+A0bc1hO2qPqRysJ/oGDy2S3Kuj9MX89PSsat4uSSpY7cAPubSOHC+zT6jOmc7aluex/VX/WkSVrvzb+04kFT5NL54Ya0+x/p3/sLHeQ
wXJ2ybINanAz4eN2nrlBiZGJkYmRiZGLk7MLKVP4Adei18a5JQoN6KVkfid+pH2zCMm6MFBn2JJL7zcU7ubtz4rm9qYmfW18FqwDn+3BaIlpOq7H3yBj/s0NyHx3XJwvHjAlcdVgOqFZAxh9rVteOgUnuxDpkXB0Sqq1aCdVt8og37B//ngFV9xwJB53Q+xvbQZOHAg8+55O4onqlQ3XB/JwPrp8+2SqrnVkVJQVvlVn6DFKjjmPvcAnPlw8mjiz/lh+OkVc1kedSvtQ3nJNaB5kcdX/ngthwXvK2ipQd9fTLzn3TwlBiZGJkYmRgZlBi5+zByxy64HHEyr7UuJLloo32Axd9M0KtaTvZKqkjuowpxSZUNwglM+7FsmgTcmE5m5+gacDxG6DD0oWDIx/mH61o8sB55DpeM+5KG06kGRIylPyoYFPNqtZSptU2Dz/NvX/Txpzz2JXpNpMFny690C1C01USkulV/WBbECnSa1OJc+ImrGMb38BuWKXjQ+FN9Rn/VmwONFji0dNnKEfqd+2ncxzFd6CkYc+w7HnkeBUI+pouR1gKhpRNdeIUPuj6txVdLn25x38pPOtdWFi5JW6fESD9HYmRiZBxLjJzXT2Ik5o4fThi5I89wxcN5zmHUwGoANjAnfw1cFW4mgBiR54pxNIBaV9k6t55vAQ+TGlJ55DaxJcIBipK7EmedAps/9sdjq114LOWb5eLEwFs3VBYGWtWhyssArPpgPlvB3peQhsMhSilzABpzqc5U1laQu4TkdMn65C0iXBF0/Vmv7JNciWQ+VQbVr4Iq94vtILow4u9c9QneFZiUD5WHxw0e3cJQx+Qc0LcIYNk1xpYtdlinWrFv+YbaQucNvuO7e3A2fqzUJezWMQUflpkXXH0gxDHM/GtuVX3G+Dyms0fS1igxMjFS+8V4iZGJkYmRuwcjD/kthTwxC89BxUlQE7VzAD4WffX2fZAmH5eIQlkRIO7qtY+vkEUN1JeQ+bwDLxfYffyoM2h7Bxou4Su/qjP+r20nkwnG47EFRwYRVwHihD4YDGZAB2D2w4IuIateNaHGWNGGAXSZjlUGBnk+z/K4RZPqXX1HfzSR4yR8MRYCMUfog6uxmhhZduePk8kEw+HQAiD34+OaNBlY2J4OnLli7MaNxSOP3wceajOVvaV7tbvGZYynx1sLDuUh8gBXQ2utczZj2Vvz8efBYDAXWyqj8ql8KWAwKOgCjxdoPJ+Or3lhK3ZKWqTEyDbfiZGJkYmRiZGt+fjzkYCRO/ZaeA0SdhROwHE+bpm3Kls6rlZx3PzKB5MCXV+icX2DZ5fYXBJujeOSzjInZh4A/wBjjOOCiMePwHbAzw7GAaE8OTBVPridOrvypvxzBa5Pd1rh0oSgVTXl21UonO9xsLskyECmCYXn18TibK0+yeNrxSmOLQMzAHPPgDh9sr4UrHVcTp68MIi5eKHAQMcJLsZpVQl5Ps4jrJeWvVhuB2ot/9eFCceK82MGRuaJSfu75M0AyO24r8aL04eb1+mUx+NFheNfwX5lZQXj8Xhh3KTllBiZGJkYmRgZekiMnOfpSMHIVn4CduiCSycKBjWwo60eU+IqBo/P/VUo/c7gw3xFNYJfUeoAzAVeVGI02XI/5UPbOl1FhYVlqbXOADcCkB1IdeISSJxzlQ4HdtpfnUyDm5OAs28c5yTCztuqamoA1lpnP9TnZGA/4gqUVr1Y97Hvdzwez+ku+AteYxtG6NEtanix46qYcSz8x9nQ6RvotoHEAjd4CtDiedhXnB31tj7bQBcnvLhWP2d/4vEDaFWevspjfFZQZjnYHnwubMGL0mjDfLuFEJNW+NnWrCPlP3wo7Kt9op2bT2XUnMCxxboLmzPgOUDVeNVcwf0cULDMbuyk7VNi5Ga/xMjESLZZYmRipM6nMh4pGLljd7iY0VbiY+FYSWwAZV4Tvd7+4/GZWhUgDQIdhz9Hko/2/J3HCedtGY8NpklXnZV50Ad6FWQdoNZaZ07nqh2uesDEjstztpyKq1UsY3znRMoJlufmpMuByM6+vr6+APycQDRZqv5bcio/3D/szQnLBZ8LTH3LVPDnkr7yyPodjUYLY7C/cV/t7yrjet7FXoypidwlRl20udhWfUffOM8LEtW3Vh1VZ+xr2s7lAK3YOlKeXZ6K4yy/06EbixdFnAe0r8Y5t2X7tRY6LJ8uUlSPSi2bJx0aJUYmRoaMiZGJkTpm6DD6xvnEyCMDI3fkgqsVVC7w+DM7ghop/rPyl4FOfF5ZWZkL2lZgMY8uwFj53J/51zF5y00roHgcdphop1U71S2TAgbLxW1cxUUrZZrwmd+oPLmxNQnzmGy7GEO3yozH47n96apX59DBnwYz61srdy0e3dhuHN4G4Ko+3CcqS+ETnNh4QaT+qDZ04K6AGomfx2cbMtjwG33ivy7QeDGi8akLO9VnEFcqdRGjCdWBnCZCZyP1MZY/qvO6IGA9a2XdJWJdJIQtWAfL/JTbqd+y/K5SyrxprDKvqkcdg3Mb24F1qy8nUJ0kHTwlRiZGJkYmRiZG7m6MLIcKpKWUGreUgzjxqKCsCHUaDXANHK0UaHLUufj4YDCYS1r82lW+CuaEGgrnfbga5JwsnSzqmHqeeVCHaH3WIIjjOoex1ULyaunSkQtwtZFLfvH7FsxzbEVQYqBhWdUPlE+2m+pME3/YMs5H0tOHaznxORnZxg4A+4AqqLXI6bO/UvDqEjTPw3MEMMZDqLzg4DbMI88XvKi8Gg9sQ+XPJUunK61MBUVcjkajBRB2IOFk4LnCxppUOV8pn61FgVbe2QbcX3Mg6yk+K4/xWSuh3Jb50jlaOnGLHVdtHY/Hn6i13nRhgCRLiZGJkYmRiZGJkbsDI6c50CaIHdtSyAHLFYVgiBXLitAEywpTRS4LkviuyTYMw1UJdhbl3yVdt//WORcnH+7fCh5nVAUKbs+Ap+Np8nG6dv10HpbPXeUz6S1pBXKtDAVPq6urM175P8vBY7D9OQEPh8NZlYl1x/rXqqMGWACJ+qT6ZotirNhL7oCHx9MFQQsgdHHlFko8nvqUiy0dP6pb/FYhXgRwHLO+FLT5e8QaJ9awEbfl/goWumh0yX/Zq3S1v0uWGrsuSeuihftpruI3gGk7XaiqvtgvXQ5RmVgejlXlS3NcLKaYP26n+YNjMOngKTFyc67EyM3PiZGJkYmRuwMjd3RLYTDMTDJz8dkl4TimVQa9CmXHYgdkMFAgUaOoAtXwTK4axMlNFc86URDpS9xaMeCxHWCpPvm4BiJTHwDxfwcefYDCjqzyq0zKBycxYL5iEMHWGlMrbqGPVrXVUQCOyt8HJi5JcJLl6rAmRF2oOAq+AyzjmFvQhJzqF47vPh1wktfFiEv6KpfmgTgXANtKsi1ds05dQo1FnuODY6EVD8xLqz+w+GC0W6SxPYJnl+BdPIYfBwDHwrUV9xoDrp3eDWBe+SH4rcR5X05I2holRiZGJkYmRiZGHvkY2YeT/hVI2yRWNCc2ZUiZ4e+8v3ZZIDihWkJygGg7TVTxX694WZnqQ
KroMGLsjw25NNkukzGcUdvH8ZhLr6o58UQwhvO76gYnKHXQ6Mu/Su906ABKjztwVEBWmUMeltnNyUmcE0+rusp6ir3jLdBfdox50ucStE0kWAauZTpxQBKf+S1iqntOImq3lkzO19mHGag18XJFWR+G1vGXASj7AY/v9M+88OfYShF6Z3m5csUyudylABg65i1iYYs4r3cJeBHM1XCeU3nlObk6rTrgNiyjto0tJQzOnM94Dh5D3/SVdHCUGJkYyfpNjEyMVEqMPPIx8pARVJlX47iEGJ9d0LFDuyTeUm60c4DERmeF6bjKV8jC7bkqw2OzLlrBrcHM1amWsRQ4Odm0dBXfOWhcgm+BggKMJjmWm89zoLjEz/bRiuPGxsbcb3joNg2etyWz6kd50UCNRMFvOXL20T/Vc9hOH2pWe+tn5xMKkgEYkaQ1ubH/BGkFnNsp76xT1aNWRuO4gkLIzkbi9KcAACAASURBVLHBNuZ5NXadPqOP+oCzTWuxpPLHeLxodfMyvyyvytXKEzGm6lhlYbs4X2KeWJYWWHCeYoDSCrbGM+cQXnzFMc4huiBO2holRiZGJkYmRiZGJkbuSMkyHuzUBM4Jm5PAbHK5padjAFjo16oOAlhwclYgK5uDXoOFDRt93L525wjq3C6hqUPxXOpoqhPm1yUHdaRSyuy3Obg/66ulD5aJ5+eqiOqN9cTbP5Q0uateXLDzWMoDy6f+ocHCc6l/qh9GkmRdRrVP7RWfGRBZh/oGIJ7XycoyMuDG/udoo37i4sL5B8sffzGna8Pfl1VTWfc6v+pZK4bcn9tyFanFh/obgzDrUWMkjkU8uAWL2pNtqbrVnKa89bXjhN+qyLkFi86vudjx4XTi5tSFQ9L2KTEyMTIxMjGS+WD7JkYeORjZimlgBy64IlCcQTm5OmpdoYcyNWEvU7IjHov7sNM453XzRXJx55Und0tS+Yr/LhkGqWPoHEq6L5rBTeWNMfXBSqVWQLmg4/OsE8cD64iTAwe7kxtYfMvXMuDSeVQOZzfVX6vyw2PxOQYOHc8lVh2T+enzMbUVb13gBMm61N864fNa5eSxo61baLRkcMma5dMc4WJGFx6uWsZjK0i5MeMY5ymt7qvcAeour3EstLaGcGywP+hdDpanb0HaWjS1ZOmrnDJ/zLsuNpO2R4mRiZF8js8nRiZGxvfEyM02hzNG9tGOlitbSVMTNIA5weO4qxwEKajomNoXWNyrHZ9Voc7oYUSuonB7bsNztuR2Y2tSaCVtTdR6+5a/O107wHJgyqTJ3tm2pbdWdYfBRGVo7dlWEGDduYBd9tmBgVucxPjxmefnPnGMK2rRnve9tyqeOqfGBx/Xh5Z5YRNzuGqW+qjGg+7XVl0rYOsiUedTWfoqhn2AqnHQ0pFbKPHcyjv/6VYTNyefYz07Up5cX+fL2s7psmXXOMZy8Tzhq1ztdTK3AFTbJx08JUa25XZjJ0ZuUmJkYqTaJTFyXpeXdYzcsdfC8+RhQD4exMbjBKptOaG4OYKcI/YlEZdA+VjMqVfz6iT81iE3px5z+nJO4Y6rHlSHgH9wUUGmFQBKk8nmQ7iuGqg8x/zu9nifDpYFsXNgBSSWmwHSBYH6Avfnt9Iw7zw+99XkxAGviZbnDT25hQ7L4Kp1PF4pZWELhkuufbpnvl28qmwqvxu/T+9KrmqnCw0HBG5u3crEIMKvoV0Wqy1gZOBx7VnO8KtWfDt9OWBTnl3e4+Ouustz1rr5+mHHu+NN40wXgUlbp8TIxMjEyMTIxMgjGyP7aMdemuEmYuWyI7h+HBA8VijY3erksVu3JzUQ1BmCuFIVgczGiOPBS99tb/eZqz0a+My7JlOXUJ1M7pyzRXzWIHaOpcDhKkNOdrWzJlq1J/tJi2f+rr7E59SvIrHoMU76cZ6TBW85iD7BX18SVN8NvanNVS61Kfujjhdz6vYNp5c+/uKcq3zHOK2k6BYMreTE86qP8/hu+0dLPzqPjss2Zt3x1qCWXA6c+ZzjQbeCtMCWY0uTtG5LUKBzPq5zOr0on84XWu0d70nbo8TIxMjEyMRIPsa8qL4SI49cjNyRZ7hYEK46aED03WJUJ3CJoiWMc3KXBNjommDcWK3g5DF1ruDBfXbzRZuW8+m8zvE5aJhnBTrHt55XwHUJ0AG+qyjyeZWXnVfbu0TK/CqPTgb+7ObneVwy7PseY3JlM3TA4KS+qA+1qj412FuJxVUUlyXGUua3ckR79/skzLv6QZ9+mNwWo1bSDzu4BSHbiKtiyrPTqdtHHrHfijWeqyVna8HGVbFWnIZe2MdV78pDy19iTleN43Yazy4fLZNxWW5KalNiZGJkYmRipFJi5O7DyB29w8UOH5NrYog2rFRg8wFFvYIPJ9Pg4vOt49tppwmI23KljmVoOZ4m2iAegxOL47EPyFi/LRByAOfk1bk42BSsHGgD3sY8brThxOEAw8nKetGA1HnU57RdH1gooDuQ4fnjj3WnW0ZYPvUd9pvWwsYdc4CtPCixz/UlQRfD/J3Hc4sBrdLF/2VbB9TOyjd/V1voYoL/8xuleAHB86pvqc0dkGqsKHD27V/XMVR/Tm7VV3x2C1rnqy1S2V2OZP62ut0qaZ4SIxMjEyMTI2OcxMjdi5E7docrJmPDuS0FfGtak5g6hgKLngcWA5wNolUUl/A4sDUhMf96S52Jx28ZVZMvkzqFBin/Zx1zYDge+DjLxaSyRzC0wAyYfxBV9apBGf91+4kmMJdIVW/RzyVonS8+q2+5W/LRRh9K5jm4Lf8QZBD7WitZqV2cHzuw0PG5/WAw6LUHH4tX5vKYDIo6L4MGJ0i3LcQBu0vMqrNSFhcqrkIXMuqbo4LcvCoH86p2cXESY7kk35JTx2npQfU3mcy/USza69wt3+zTu8u/erx1jPu14jSpnxIjEyMTIxMjEyN3B0b20Y6ULMPYnMydkHxLmc+3glirZg6kol+c56qQzsM8AFhQkCZTnjM+89j6o3d9SY3nG4/HC0Ed7Xg7h4Kt8qHJQbeCqFxODk4oyi8HEo+vcvIeYLavA9dWsPO8LSd3wc26V8DXBYXyxQkmfpSQx4ngZnso8Ib9XHJS8HOV3IgZ1hvzrGOzDhUcOSZ420XwoLGp/uD05Krp+tpnNwbbpLVYUKByIBzfdSuCi69o70Av5OC3YvGChtuoXpwNGMQdvy1gc/I7v9H4VNkd4DJvnD9b2ziUTx3LyaMgnrQ1SoxMjIzziZGJkYmRRy5G9tGOvKWQg7J1Lj6rQC0A50B0zq6JIZTnEhjzoUmJDcDJIo5FsPOP6WkFbzBYfFOR+90VbqdyayBwUuNKooKovga1pWsGIDe3OqRzzBhXF14O8FUu1RvzyyAYPOhWAtabJudIEhpoKp/qhxc/8WOMLM94PF6Yh+3hkh1TABT7sdolkpKrZvMcfXYLMAxeYyxdWLGPu7hzyYRjiiuwnPxD96oDrRSpLh2AMS+tOOLzLkHqnG5Bw0DOsnM7tpfSxsaGfYDX2Vd5DN25vkyqszjWV113Mte6ufDtA/Ug9h8+lnTw
lBiZGJkYmRiZGLm7MfJieS28Jvw4zw7EytZqmiYynYPb6Rzs/OpsPD+THuOg0XECENgZ9XWpkeQ5ACM5OZnUaRkcmR8HWvrZOTMnSdfGOabTu/ZXnTmg5zFc0Gvwq+NrhaXFs0vCGlAMjA4QOFBVp5o4uXLrKHhX2+mrklU+9zn8SfsxgLqFEcvMMnA1yCUq9jvlQyvnAOZ0Ebpp+Z3q1cns+GefYHlaiw83Z6tq73zdAVjYsjW2Vs1VFs0lDBYK4i6Hxlzq+7robi0cWvpXWVt2Szo0SoxMjEyM3KTEyMRIleVIx8hDvuAqpcyqFOHwbGB1YBXCgYo6sSa+mIMrATyXggiPpUlWFcpGUEAJYqDgfiE//6nj9iX7OB/zqi5Z56xHDjx2NE5ELBPrTZOk0mQywXA4nFWyXPJ3+lM5WkmAiSt0zl90HlfdZZ0oMGjC0QSgPqXjaiLlz3yM/YYfSlUfDR6CmN/4rImZda5xwgsVBmJ+2F51pHrluV27VvWNq1mtBYBb3MWYXMFVG/JcTkcsm8sFLdKEzFsp9DyP7cZRPUbfiBu36FEAbYEL67gFMtHPgaADZ64Wc46Ivs7HWc9JW6fEyMRIp7/EyMTIxMgjDyP79Llj6MmKnkwmC79/EMSGdpWAlrK0yhdtgcUrfz7GClMHc0mIz7uxFQyYZw1avp3K8yg/qkf9zHwrYDjeoo8mrOBTA5p1zkkoSJO2yqIByP14m4nO62RRfmJMrkIB89sbXPJyuuexA0jCZsPh0OogZGC7cvU2iMFG9R99OIDZxgE2qjsFYCbVN/u9LrKcTtmWLLN7TSzzqraPtgyWqm8HyrpI4jYbGxtzW1UcKMU59W3lXRcBHAM6P8vP/V3+cP2D1Pdan1s/yun0HW2Dd44dfSCcF7LxneeJmBwOh7NFMINNtOE7BayjpIOjxMjESNVXYmRiZGLkkYWRzi+CdmRLIf9QWkzqbtfFOWWQSa9ANUG45B7n1VD8i9FRYYzzzglcEtOE3JKXZY3kwGPomM4wTjeufQvsVH98jG87h+6UYq5IrDoHO1volHWjAaFVTg3SSBgqvwN+5SPG5yAL0kqZ9i9l86FW9tM4xttaeH5OnGwb1g3PpQDPi6AIaJWR5VLQ6fPb4I9l4s8si1aCuW3MET7Dizgdm2XVhQ8nJ52X36akCZYr3py0V1ZWZtuSOKbcgkuPsf1C57yQ0HH4TwHIkcY4LzZ4HPVJfREC+8lWwDz+u6pexFefLph39S/ND8uAJKmfEiMTI+N8YiTmvidGJkbuFozcsWe4ggF9i00wHAwB807PwraSpho2junDgqxMNQjzpElJ5wK6ao4LIg6Q+OMEHYmpj1o88nnlx4GAc27tx4lV9a5JezAYYDwezxKrJhqVrZXwnH5dMLJNGIhZz9FOgSr07pKJs6vSeDyeq3qEbLFoiX76PED4nXvNLI/DMkVyZh65WsikvqP2jvFiPrdvXRdkmsg0ebF+OdnyQkBlY1srCGtidwDYWqy45Bb+qjEZ/GoCjDE4H/H8bmHjbKm61yq2Lk50caSg5HTAPq88OH1wf40dtRUvpt24MYabm/XaiqGk7VNiZGJkYmRiZGLk5ri7CSOLS2bbocFgUIfD+es2rnIsTEiMta4UJ5PJXAWJ/7vk4ebgtqoUdfZWIuIKhjpFkCYubqfgxX3C8PxaVU34y5yxlZwj0TKQaL9WEnZAzvpknYSO3Nh6q1Xf0sP60WqS8sXnXLJVX4ugdv7Hfbj6F4knbBK3i1v79nURo4mT5+Jg5wDXxOLs5OTWpAZgAVhbi6m+GGKAZburfK3KDi8IVHcMVjym8ze2qavwOp1rjLjqlBJX2VQGt5DSvMFjsC+p3VmP4XcxF4O2W1yrvp392AZM6k88X6sPjxdjsBwAMB6PP1FrvantmLRAiZGJkYmRiZGJkbsDI6dbee0V2I5ecCmTqhgnIDsfGy/aal91jj5Q4ePsDFzxiPmCj1Aet+ekx4ZdWVnBaDRqJnh19uirFU51zFYS1IQfPPA2EJ07gtG9GpfBlp3M2UsTX+jFOTDPr8e1jQNcFywho1YVo50D/DiuyUBlDAo/VP1zYEe7OM4+EdUllUUBVBNdKxG29AdgYUwGcE1g0T8SmdqgVW1mXlUXGhNaSV9Gqjsno9qIY4b9khcpLH9LTo5JYN6ntMqq/qkLIOercVzzlSbm1gJOfSLm69MrAznr170Jjv3YAZ4uZLVfXnBtjxIjEyOZEiMTIxMjj1yM7LvgOuSXZmiAuiDlc055+hDeZDKxDh4PsMUYzqA6NgdunNPEFO1Go9HcG1MUYJg/dgyVi2XgByxblbRoH/y0gjJ44Fv1zC+DIAMYt1eg1sTHFYRor4HEdoux4r8LFE4CwX+M6x48ZH5i7NXV1Tk7s9whH+uuVTl1cwSxPC3w4WSicjkbx4PL6ustO7NPse+GXTjR8PMB+hspsWBx8eBi0cnJdmmNE37Hx/mcA0GueKnelA/VC7fXh5f1f/DC8ccyuFjQGHB88TG2kcrN8RDzhu9rbCsA8p/LhTyX+mX8sf4VLB2/3I75Yn/aykIhaZ4SIxMjEyM32yRGJkbuVozckbcUsrJKKTYoOSGxMUMJfMsyiIMoBGIBNWmxofgKVdvqeE55ratlDiB2PnYuVnrLObUKpcDW0jPzrfpkfp3jMEiz44Y8GjjMS60Vo9HIVnP4s3uDjbZj3lWvPAZvZYi/aBNVCZc0NcFxZYl16b6rDlvHmJ8gfeCc/YD9Mb5rctREX0q3JxuYByUX+LoA4M/On12FOOZTIOP/QQwKfMzpNRYMGpO82FF9cnLVcVluB+CsT27PeuUFTfTRRZHK4cYB5p/T4Dbsr5of3DgODGNM5VHBQccNPSvYqh21UsftnD5auSmpnxIjEyPjc2JkYqSOmRh55GCki+OZjvpOboUGg0FdXV2dCchXjqwIFp6dhitp7PSRhFvjBTH/7tZrGNVVIxhk5pQiAcLG4fN9yUaBUW89uySjySLGCx41STojq76VeByVP47x9heVR+3A+olxGSjjT18py0lL9Rc2Y73HMX7bDMujPLRsxvbQwHPbTngO1jPzyonD2VcXFvFmHF706LxsG31TUIBp7AGPLR6txOT8vBX3LX9SP+RFiluoKZ/s5+zHbDutasYx9oXghfOIJj3lgW2oulJfU9258fi4ApbaUXXI7fv8mOXXZyRczLhFCp93ecGBphLrKvxwY2MjtxRugxIjEyMTIxMjEyN3B0ZOLy5tpx3ZUqiG48DigItzwOZtY3Yul6xYUcvmj+/8n0HMJam+4GG+gflb2Ty/qwK2ErUbl53SObV+Vwd0zqWg03Ic5SV45dujLtkzyLPN9DWebAOeZzwe925P0AogPzjNsusCQW/FOz1wZYf17xYWTFwpjPauMuuAROdVH1Swjnn0mQVOGvGcRV8yDPm4isdy82f2Qx1Tfdkt8py9VU8cL5PJZPbGL1cVVkDUsdQXYtHodBFtuUqo1WqNK10gOLDg3OT8x+lYgUOBio/FItQtPpaNq9SXRx2xjrS
SmLR1SoxMjEyMTIxMjNwdGNlHO/pa+HBsJpfE1ADRjo9pclEn4bYMGC7xOcM50mDmIGYeHfC0ZODP7m0+rQqEytj32T3kCbQfWtSg4fF4zGUgpMDvEiKPxf04meo8GqwuSce5VrBxcDEvwU9f0DlwccDKCYqBrbU4jXG1euQWMKoXrv4GiHDS48WK6sHxy/rXSq6zu+MxiPu7GOB5FYD65nPArMeVXAyHvlwllfe4t5KtxnFLn335z/XTz9zHLcZCltYCUPWj/blyyXy7/KPAFHK1YiZpa5QYmRiZGJkYyX0SI48sjOy78DrkO1wtY/N5dhhO+uH0TuFMTnnKg7uKdgmKxwpeXCAF8ULD8RjfW47EPLlADB6UR5ZDxwra2NiY7Vt2DsT/VV+sA8cTz8320j/mjXWhxE7JSVDn6tMvgwjPw/ywvRiolWetDvL8XFVmnQUxiDFPPJdW5ZzuXRVR/zO/+jparVxy++AxyPlYH0AocLItVEYlTtyhT632cl/93vJnBgWNkb4E7fyGt+CwL2ocqF5bOmPf6wM8tidXZFUmXjyw7tUeGjsuF+q8Lb31UV/+TeqnxMjEyMTIxEimxMjdiZFLL7hKKbcopVx1qxMFs6zoVjVD+7uEz4bWc3E+jrVug7aM20qICi4a3PHHD6xGX74dq/Nz9SWOq6O3QM0FdisIXDLS7zyHOu1kMr9dQl8RrHK29KhgxPZRHhjYdS7+r7JoILlkqeNwEuf2ehteFx7xnx8AZTladnT8Rxttr8SgoLYL3TpAaulJ/cEBJ+tMFwZqV/YFB6TOFznGeFyNXwckHOua+FXPLpbic1SyXMVO81GQ6lrzDIOV8z0mzhmab5h//qw6cbHtQE7bxB/n6rCbvv53mU52O20FH4HEyMTIxMjEyMTI3YCRfbR0S2Gt9eNLR5EJNbEw6TmnxL6x3Vh6G5QD280X313As0G0rSYsTgZ9TsHzsnGZbzYe928FoQMXx3+fbjkB8LwtJ9W5WDf6mdu2FhEMpKGL1u3Y1q3s4FH50iTdup3MvDEwx6105lX3vLuqC+uRx1Xw0gq2I92yEnvV++za8uE4FzK6H5d0vtVajIQM2i/k0dc3q15cfDA4abxpPGhc88LHje/aho11Hv7MoOXiTvXr4k/n5346Z0uuvhhiX1Rd8ziuqtfSj+uzFUDZbbQdfJy2B5AYmRg5z1diZGKk6iUx8sjDyB1/LXzruLZhQVvH+pKUtgf8A5l6xR39HJ/8PcZ2jqEJq3Ve52Mn1S0IwSPLEI6scqjczLcmXb0CV1BoARU7lP7QoZ53/DhADhm4eqn6iz9OKNxWE2BfUDJvfRUq/uwqjOq/rAutnuocLRDk7RYa+EG8NUB1ybzxPNyHdRbf4zP/1on+GKTqxenJyRoUx+N3YHihFLwq3zrespjVCm+LFx2X5+67g9NXteRxl+WJvvhQefp+nFUrmnq+9Z2PKbj0ycGycJ5LOjhKjEyMTIxMjAxKjNydGLljL81QJp2Q6uRBWsngJBJjukBi5XA/Hp+VwoksEocmAk7kPEbcVtYk2kreeuvd6UhBhckFxzJwZYr59dfN1aE0ibeShuokkhYDnsraAgRNjly1acmviwauzjHQulcnt4C3BYBOB8q/nnNjtRZRrXYKfDyP+p4b182hPHC8KO9hY1cF0rYxn/ISMmhlb9l47GM6hy4g3LhBrdcWA/NVVPVB5cXp0VHLntHH8a4AwZXi0B3rV8d2ixeNa3fnwsUBV6DVp3iBknTolBi5KQuQGJkYmRipfPWNlxh5+GPkjl1wRYDrvtFgrhXQwKaS9PYzt3NAwWPorX+nUFZWyzk0+XESZefgJMr9dD4GyJbczI9eKfMYqo+WTlh+HU8BVJOe8qdy61jxXfnlMRko3IJBdQwsLjDClupfDsSYv9Yig8/HZ2eXvgSjsul51oMCpgteJZfoeFxd4LAuOOlobKmNHNjw+KozAHabQa2bP6zK8nFSaumzlWyd/6tuWZ98zOlDSdv06T70yNtoHLHeNVb5T+OOK8dONse/+pHLvxqvHE+tXKILK37rV9LBUWJkYqTylhiZGMl9EiMPf4zsox274NJqlXNmDfSgaKuvZAyhdCw9zuPEsb5KkOOvFdAOCDRAnSw8PsuiPKicqifWr8rKPLrb8nrLn3lY1j8qncELO6I6nwMDto0LIB1T+yivoWvdV90H0uojCoiDwWDhQdOYi+VQQHI2VrljfzbPqT92yTJoondAwElA5YiFDS+iNHkFD6EL5YHt68DO8RTJtdbNH+oEMDvu4qAv3lgetUOQ2pHlYH91PsBjOX/j+GuBC7dvndM5XA5x7Vk/6rv6fIT6+3b4U5nZB4HF1+HmXa5Dp8TIxfOJkYmRiZGJkbsFIw/5Ga6YkAVVcg7F/V1C18qQBgUHD//x23H0vHMiTbIAFh5m5Fus0SeCm/vzbXsem/WketH+LX1o4LPRHQDxeMEXf2+BsM7HjsRJy/HqAp/7hY8wucSgsnJi5h9W1DlirKgg6bjqXyrvMt3zd+7vkpi257aaZHVODeioFrnfqGGfd/4Uc8Z4umjgH40MXatcQRoPnPSHw+FCe60c8xj8XXXA/sn27gOh4CvaKvA6/bo4cHHGf3yXgvXGvPPiKsbSKjHbVgGaY08X6cxbC2g1B/OcbJeW7lrzsO6Stk6JkYmRiZGJkYmRuwMj+2hHXprhBGClcBJwzh7Jgh9SbCUAJ5wLfq6GKDlQY0dS4An+dAwG0WjL43NSYMM4x1be1IFd0mZndDzr+Jok2MG1gjccDhd0O5lMMBwO7dgcfOqYEXwxH9sYmK9qcaDF9xgzEpbauqXDIFfJbQU9gJkfht9GO060WkFsLQy0qsn7kBXY3QKHdRTn1B8Y2BS41IbhLwG4al/n77x4aCWXGJcrQTG3JuhWTLqFGOcCF9MrKysLixTVn9qhNb/qS8GY2/LbsNzCxOlOY0P9L+SJMfQ3Vdi2qi9HMX5rgaNx6kCWF9JbqeAleUqM3ByDx0+MTIxMjEyM3C0YuWNbCoN5TgpskGXEjsgO6K40NSmqw/Jn/i2BCBbnKBqM7HThLOPxeC6oV1ZW5qqFeuud+XGJYtmWDpZXkxG35zckMf+6tcCBM7DpPJwAuTLJOlP9cLJ3Acs61B8h1O0Sal8NoNA386OJhfURfEXCUTu0dB7+wok3xm0tGFq8u7l4W4G21fHUf7iPJmddEPDYoa/QA1cQQ7YYI+bleTgZ6lYQtR/riZMR66AvBhUMONY4cXO1SxM6J3n1Rda1ArHqQHkKYt9VudjnVS5dJKmtol/YSHXnQIFzp6PxeDy3mGvJxPmilU9dPk7aOiVGJkZye+YzMTIxMjHy8MfIPjrkCy4GYTdpMBOMcjJc9pBZi3mXkLXiE4HV5yhMatggddJoG0DF88RnTvwKTK2kweMqTxrYrAdOHurY0Sf0r7fbmV83L5O+9nZjY2OhwqcgoosJBeeWnMp7jDUajQAAq6urC+NqImEAYp5Yhy7Zc9BxxY5BNj5rFZN5ZX70t0K4SqZVQAWWsH
Er8DUp60JMgcgtKPriVm0HzPtZAK/z69a4cU71FfrlRR+DUvRZXV3FeDy2sczytYBMK2T6gG8LwJ0Mbh7lmfXGMcQLLM6hautWHnS+EmMF6DMPfYDUN5cuXJO2R4mRiZGJkYmRiZG7AyNbOgF2aEuhS0aRaOO8Bi4nGzZGJGVWEjsYC6ROE07N1RceV5NyBKm7Zan77RW4gly1hG9xcuVQ5XTy8rjOiH0A4/TK8rnEpAHAtlKbcaUygCT41zlDplbycAHLwBY24CALYIhE4hyfeXdBonJqtVV9xiVf3S/Pc6h9hsPhbI7V1dW57S08Pid+vsUd/3lREHxq/xYYqOy68In+DG7sw+ofWhVTCvuxHdUPNMlyX5afKY5peye3Hmd/idw0HA7nkrn2ieMa4+zr3I/Hb1GrOsa61piIWAvS8zwOH3eLCgV7Z98WoPTZPKmfEiM3KTEyMZLnSYxMjGQ6kjGyaBLbLpVS6urq6hxjOqkKGkHBTg34W4/8v+XcMaZTXrTR6h73cYmGHVaTspKTI8bRsXlMraYFAPYlSR432ruExAmbx2Ubqe0VrHmulq5a7Xlft0vqzjG1auOqTywDA7aTo1XNZH1z/6hQhm3iWGxv0AQddo+x+DwH+fr6+hz4RhuuIrV+0I8TFsvNPPSRS1Iqoy4eeEz2USd7fOZq4jIg19jkMTkhKxiwzZhayY/nUn9luzPPWqFVmZmHrcRS+LJW+ALI2O4xN4OmrlREAgAAIABJREFUxhPzqTlPbdSS351z+nG2HwwGWF9f/0St9aYLSk+ylBiZGJkYmRiZGLk7MHJ6sWqvug75gmswGNRwJL6qZ0OoEXifqSZgFTb6RRutckQ7Djydj9soaULSdn1ApwDpDKZJlasGEdAtvngcTWR8jB1CHW4wGMx+zXyrCUlBJ+RzVRvniKqDFsi32vH8KrcLBNWjjq+V1Dim1SAH5uoL7L+sF15MxPiRrLmS7HTkAl79kZO3yu9syIko2vODs+PxuOnDnFBZ725M5Y91oHKx7hmAXLIHFn/7ydmsz99ai6MYi2VTvbE+lNwiiNvqPm83htquBaJuO4cbl6udHLNqo2XyqM2db45Go7zg2gYlRsL2S4xMjEyMTIw80jByenfZXnAd8jNcmvDZWBqoQXFV6JKSJi8noBo/2nB/9wBhjMEJTedgZ+UA1qSmyZ4TOANb7KVm59FqIFc9OGFrEtZA1uAOoGJZOXAd3ypD/Ocr960ARt/xkIXtoclN2zJpZYzH5+SnyUeTSszNb7bhcaLNYDDAaDSae3CXkyW/3tUtXEqZfwU0BzXHgbNxHI+HODlOWgsW9jfd7sG6CwDhSqJSyOniQROx+iv7iXu4VPluxRbzov10MaILnxYga4Jt5ZJWwlV9K//s186v3Fi8gG4tKJYBuPLjfEZzs7O7yseyBC9uIZi0nBIjEyOXHU+MTIxUG8c4iZGbsh7uGLnjbykE5m87M7EitELAY7jKFYCFBBCkgcufteLHAcDA03JSlS36u+CKz5okWB6XTLTi4drGMXUidTStLEXy4AqAAiy31XG46sh9XaAqf60KiSaFllxsM3V0V6VloGC54jxXrFh+lmdlZQXr6+sLld+wYwuMeTxdkDg7KXiEnzLgtXQ1mXT74tnvtV+Q8lxKWagGhX+o77H+HRDyA/FaVeM51HdYf24h46qHXLmLN6HxVgP1HwcgjlS3vIVE8wwDBduT//PcDgRYDrZd32JJeeV5NMeGX7jtN6znlozBE/PBC4wWOCZtjRIjEyMTIxMjgcTIIxUj+2hHnuFaW1tbSPJsSGDzAVJ+ZakLaq3AhEBBGsxaCXNJsi9psXFCYRo0jjTJqQHU4MwfO5PKH/01WXN7dVJNBk5/LfDQJOj0pe0d6GpAaNJo6XvZ3Jz8eFtOJBQXLI5ntY2zl/LL57XS1LfgiWQ4GHQP2Y7HY2xsbGB1dXWuwsZ6Cl7ZB52v6kKGx2BQUX2wDNE2xoyHlqMCumzxpgslx6fqToFex1S9aFJWu/IY6tssbyyKdDGi/4HNRaX6hsqtczmdqC76co7rp/M5P3ALFvUnN17Y2Y2tvOvCtJSSz3BtkxIjEyMTIxMjEyN3B0ZerFsKnXHcea2o6H9Nyi4wNOi5P3/mxMRJySXVIDYqz8dVG2cwDno2WnxnY0TQsh64GtaqDLHsLpC1X9zm572urmKnc+h8EeD62leVT51ZAZB54MUE64u3Ujjg5LFcMnX9WCZdJKgcIWM8AKzVCl7ksE87vpjfSJD8ml6n7+jL21QCNMOeLaCPqmT4oiYptl3ox52L81pJdL7HiyQH2iwbg4iLgZiTk7mzoY7tco/GYAC6LoI0rwQPTk+OB5fruE9LH24Mltct1HQul3P0DgDnFY1d9pMYw1VvFaiSDo4SIxMjEyMTIxMj58fdjRh5yK+Fd4mFA9EpNBxVr6o56LWaw/1Vsa19zlol287CQR0sxghgqXXz1jQ7IAel8s/JpBWIzK+TO6or+vslLpBaPx7Ic/FrZSPxRaWVgT0Src6puuGEo7KoLjiphFytSksrseiCQoNE+7QSACdzrkhxslM7Mc8aA2pPnU+J+RsOhzM+OBGvrq7OdMt77NkPmQ9dGPDf6urq3JYMtbGCact3VB61lyb8kCXm0vEd0KhNWc/OjqyHeJaAeeExGUDVt1kH3J6TcR/guUWOtlMZdHzmm/nTKhvrFdiMJ81BLX7ZXxxfLhcnbY0SIxMjWTeJkYmRiZHzMu0WjNyR3+FyDIWRVClOQL7yDMajvQYKEztHzM3AwueB/nf/a/A5cs6txFUTBhU1FCcwHpvHYN6iPW8d4P3JnOh5Dg4WlU/l5PFUbnZyBtU4r6DrQCDGUWDkMZ1vsC/FOOwXnFB5QcJ6VV60UsN+ybZROwZPqn9OTMy76jd4dwlLdamVZLaLJuLBYDCXOJVUPqWWvXTuWMho8mJ5lQe3sIjj7FcuebtFAVe6VUYnl4Jg6Inn5MUjLxYdQCjgqZ/EOCyH2pJjQKuzum0kFnzMY5xj3nlPuv6WDes/9McVO7fA1yofz510cJQYmRjJcydGJkbG8cTIIwMjne8E7ciWQiekS57cx906DqGVYVYIO4WSztMKGuWhbz515Pivb5jhcdWAGkgsi+unzg3M3yJ1wMOOpQCuSZ77Kvi3ko2O5eZk/YVD87mWPTggnY50EcLfuVq2bBHAnzUp6duhmB99DSm/Qlj/oj+/ZUxtoHpXG6pOXaUqxgg9s/14fG4fFWOnk4gHTrK8EGF5WU/qTyEPy6L8awwqcHKCC9u4eZ29o40bL8gtKtlGrAeWgXXEeonPvJBlmfnh+mjb8munV20bfLjfIwo+9WFpllOr7H0xr/m2FWNJbUqMTIxMjEyMjHaJkfN8xHy7ASN3ZEsh/3fn3HdlyjlHBIabK4K8jx+uBrmE6hSkjszfuR0bRKtOmkBcEub5mFd1MCdvOIsmkWijwczyqK70Tx2a/7hCqJW3Fgi07BPJzQGytg+ZtNrJc2giYOoDGa2qhK5acrXG0Soc8+Qq2S6pK
68Myuwn3K712yVObq0usZ/EsZYd1NYOtFqLBrdwZPncwor5Cnu4hYMmyzjW0kXw7WRlsHSytHJd+JFWAnVcp9OWH2s/jj/WG8vGvqJyA/OVSSeb+8667wOSpDYlRiZGJkYmRvJ8iZG7EyN3ZEuhKiWMHgK75Bl93C1SHZv/83xuzPiu/bkdn1OjqAFZDq2yAPOvrFT+XfLQuRQwFFR4bk36vI2ilM3bwOw0raBgkObKhF7Zqz3VNqorZ0sFb+af5+B+bouE8t8KrDivt3sVHFQO/n0OXQy4hKe6cODhki4Aq2e1h8oXn/l2vtOR++6ShbbRiqP6oiOWNW77q8/rnC6muU0c1y0RrbsDfD54VqBkcvM732A/ZXlbecr5SZxT/+vjqRVrGi8ud4UeNL/oHNy2FbOtfJm0fUqMTIxMjEyMTIw88jGyj3bsLYWc2Dg4NBEzc8tudTujqXCcCLUv/1dyQaKJnXls8RM8uCoj64dlVKfjhB392SFUFpcU3HhcodmKfl0Cd+c12PqAQx1aba6220qwqW5DVnfrl4HQAQmPo69bZnmdrzhgcf4Rt7VblUUNfAYhfmBb5+Fb9E4m5lP1F8Ch/g1s3t7vk5cXP+r/qjfVc4vXFr+uSqgVwT7Ac3M5O6icOp/LFUGawFvAwn3dQgFYfJtS/O/bAqJ5mH1+mV7c+byjtXOUGJkYmRiZGJkYmRi5Yy/NaBmjlUjZ+SL58XcNLv7fomjPrw3VxO7au3OtZOiCtJVU4rsmS9eOx+EHEZeBYTiQ48Nd4btKo+Pf6cLx4PTVAkHlvbUAcPO7hM2+FJXLqHDog+F8rgUMyrfqVcfjduz3LphV325+rdoFuKkvt/hn0OFkEHrmB2FV3y0fcMd5oRXjhx0YZNwigWPd6cuBm9qNbck2Uftsxb806bPd3KIw+I2xnSy8sNZ5mP+Wv7tqeSu3MG8xp/Mv1iHPo+36FsV6LGl7lBiZGKnyJEYmRrpxEiMPX4zsox274GKG47szijLHzsWCcLVPjaL9OGA5kahj9t22doAW5xmcuCKmFbLgRQGU+dWk06o+sl5bQcn/45x7awzzpceUZ/5jHmMu56g6vm674L58G5uThf4ORPiSBjDrwy0YYi6eb5n/hYzBd4wfn9nPaq1ze4BZpviuvhBjhVyOT07AHEuuwu2SdgswOSZK2XyugPurLfuSrtNhkCZWBW7mW/Wjfqd24xjv49EtFljPjli3LuHz3MqHm8MBVGvhpf2VT7ZJa1Gh43O8xhz6GyKt+fW48tLSYdJySoxMjEyMTIzkcRIjdxdGHvKWQsdUkLuS5HZxjt/U4xJ7kCpWlRPHRqPRnEO7dpE8nGNrsuNgbiV+dbJIOpzcow0nVAYrbqe3/2Ps2EMd5zgJOp40GbIeHAjG2Gon5Z95CBn4nIKwo77FRhxnXtx+bL5dz7pSIOL/TK3b3MyH49fZWwHKgVzwynrVfpwAWMdsRwadIF6YMLCwnzOAx+fRaDQ75uI15uFYZaBVf+I4cbrXfKGgwvJwHPW14/mVHChxfDlq8ag2Go/Hs9fWjsfjGb9uHI03/s85T/OEtlee+btu+Vnmy0xsf/7PizeXl5OWU2JkYmRiZGJkYuSRj5FOv0E7csEVxM6vzLMxHLHy1GDcXysp+hskTiE8h4IGz9dqz4EMzCtd+6p8LFPwORgMLIDG2C7ZqPx3fflTcKXrXKsbE0DLxApSMR6fc0mvOw+EONGklPY4PMeM7ymDXb/58Wafe/hXvhYDaXOcue8x4RbHnfWfVLz1ro9Y8JEIKLYZ9+VKj1abQz8rKyuzRNO30GpVAyOZR2LVpKW8MM8OWPUBcl0AOGB2YzFw9CVLTYgOSFwlP2TSN65polabKPHii19ZzXnAzd1a7ACwtlD5+3IR60BzhfoA21SPsx1bvLdylfMN57MuvyVtjxIjLzmM1NzLd0fcIk55Y37HD30eJj96UzZGNAJKQWFbEvZsYmDF2GDkuBQESM7kobEZixkk4+MCdhrcc37SfS/TEbZAIaPMD8yPMDeizjvXrz33nB5c7qVx2vqqwPpF2PN7d58bNzEyMfLixkhX2AnakQsuFUoXnSwIf1al8Xj6XY3jAID3JOtYqlT9/QvHY4sXNbgGBTBfUdCqBs83HA4xGo0WEoCruHH/q9zgR3Diza+/IG/SoVGVgNTqq/NbXmioj/PFU/w2CbD4o5LRTpM5J8CYS3+zgn2D5+XqGv8AZSzCeEE2GAxmVTwGEScHL1KAzdfuDofD2XcFM5aB9ROy6lYibcNjsa51HqfHOfua8eN4q5rnFizaVj9z1VRjXon54DasD62Uaj508jgQU2pV/RXgnF6Stk6JkZc8RrJcEYt8p2k0Gs1djOqCbW5xfI3rol7/Ngu6m8m8hePLIsid34lxXZtdEc37z0+MTIy8TGHkjt3h4knDmHr7Um/D83HdHsAOH8TH2aHZqVzCjfl4bB0zxlPw0bFYLndbkhfJLRBkfviH2fiVq7GgVn5mi/QsNF8ixL4BbAafbnGJtlxJ1S0/nDjjv1ab+TOw6Sc8P1egOMHEefVlHofPcczym544HnVMvavGc8TdN5ZVFzEqG4/ZSn66CHP6CuJfpOe5NFZdAnaAxTzpfFyFb+mWx1VedXsK+w/riPs7/TgA0Xn5vFv8at7cCngkbY8SIy9hjCT+g69WbId9VF99C7Gkyz6FjyRGJkaqXnRePn9xYeSOXHC1knArscf3vuSviTiIDafOpwDj+OkzBu/95vYMdGxoNZ4bk4PJVeRahlOQ48C/OBZDb7jLw/DNT35GjhY8/tsfPSjQeeNdH45vfOJ/AAC//bn3YN9xVzwovt7zsD/EZ//2H+aOxXgvPPH2mIzHWFlbxWNP/1DvOP/83Ffj1BedDAC495uej2vd4VZ4ze0egDM/+yUAwKNP+zusXW4fAF+R5aqqLjT6znEVhxcZcS7aRmLiLUe6IOGFh9vC4MZnHw1e9A5e7KvmxOVkaS1k+Pv6+vrcNgznO27hFd+dfLGobG0V0LzB57mdzsux7ezuYo3PK2AEGKjeWslb+VQbs84d0DmeHa9935087C+tvknbo8TISx8jOZeXUrC6uorRaIRSytzzJSHf3DyowO/eBfjCJ4FXfhK48vfPDz46ANznB4BjrgK85r8X5r7E6FE/CXztc8DJnwOOPu7S46NFj74d8NXPdp/fdBqw73LAZALc+4Tu2NFXAk7WdYjQi38TOOVvpp8/AlzzusADrgOce1Z37O3fAkrbjxIjEyOV177vO4mRO/IEtF4luitOrcZHkmSn42oEOxg7HVcUHDDEL66zE8U47pkXnlMdQ/uw0gHMGZkVz+OpQ43H4wWjcRvmi/nm/wdzAbSM7vO2l2DfccfgwjPOxkP//a+wfv6FuPCM7x70eL/4Ny+ejVcnB79QO3Du+bjwjLNx7zc9H4/64vvxqC++H3uPPRovOOF2OP+bZ+CRn38vzvv6d/DCE2/fHOPf/vRN+MjTXobbPOFBuOZP3Rxv/tnfxukf/z+46LvfwwM+9BrUjQlQ
K5533K3nklEkNK6qcrDrloTxeNzcQx3jsd+PRqPZRVb0izhRP48Y4vFWV1ftbXldnDG/GxsbWF1dne075gULJ4vwU6D/hwz53MrKClZXVy0I9SVolbPl4/HgrW7j4Dm0SqgPkfM5BlAFU+3DPGvFNI5p7mqBH9sj/vMxbuPyIX9n3oIHzT/cPj7rb9e4hXz4i/KdtH1KjLzkMTK2b0U/5pH/x+fWAnoWx+efDZxzRneBoDRcA974ReDlH1s8d0nSed/teKzt50guNXr8HYH/+mfghf+4eZE1HgH3OBY47xzg1Z8Cvvo54EE3bI/xit8F3vsq4NEvA078IeARNwO+9RXge2d2/c87B5hU4GePB4DEyMTIOd4ubYzc8S2FrARmUKsGDCQsQAjH59m5nKOxMp2BWjyFwvQ491HS+V3gsrMqwMY5xwt/1/3z2hYV+LMb/jzO/NyX8PhvfRQvvdZdMLpwPwYrA5x09sfwnKNvDgA47tpXx2995l0AgM+980P46198LADg9s94JG570oMBAG+6+2/i//3dv2Iy6p4xWrv8UbN5n7nnJhjuWcNjTv8Q/ujKtwUAnHDjH8OdXvB4vO4OD8GNfu1n8bOvfjo+/LQ/xT8/59UAgF9+98tw7TvfBmVlU0/PO/ZWWL9gP55y4D8PauH2xrs+HCgFj/ri+7Hn6MvjMV/t7noNhl2yWD//wmbfjdEYG+sjrOxdw2B1iPH+A91FFoBX/PgvYDIa43nH3RonnX2q9ZVISLFQ0eDihY4uFmILQfSNO1kR0Jo8+TPHRoyvccTfNzY2sLa2Nrf9gF91GvNubGxgfX197qHY4C3GW1lZmbt1r4uU+K8+HaTtIqZ14RnHuEINdGDG52rtHmDWfOC2YcT5ucUS8cXxyXbWyryOq7I58Na2rW1KMdfq6uqcvaJd6EIXq3yR73hhmTT3MGC63KhAqu1Vl0nbo8TISxYjQ5+xhYrzpItdzauqoxnd79pAAfDuc4A9+4A77QXiqajjTgBe9GHggdeZ7/MLvwOs7QPe9Jz546/+FHCN63af73EscNEF3d2Z934PuNsV5tve4CeAF/1j9/ltLwX+/And5ye/Hrj9fYFH/yTw5f+Zzndi9//9FwCDFeDOe7vvx1wFeOkpwAN+bH7sez8a2Hs54I3P7r4/850dr5/5GPD6LwAnXGOTP6C7wHz/+dgWXXRhdyG47/JAKcD+af8LzwOGQ2DfFQBU4KKecdcPABvjTu+Dlc0xAeCXrglsjIC7Xg5477nAeB1AYmRi5CWLkSof047c4YqraiaXXPlqkZmOxacqk7cv8JiqwGjDDqpOwJU/ndsBiVsIcx/32V3hqmx65R3ntU/ccla++NjDP/U3uPxVj8fzr3RbHPjeeXjy+f+O8YERnnXU/8Lqvr149GkfxJmf/RJeedP74Msf/jf81b0ejRvc72fw0898FP7xiS/Gv7/8LXjbLz8BX3jvKXjgR0+evfWQ6ckX/AfWz78QL73mHfE7X/47TEZjTEZj1EntPo83cOqLTsYpT/9z3PnFJ+G6974j3nS3h+P0j316NsYfXeUncNE5HX+lFIwPrOPpKzfsvStFQgMAHvCR1+L4H74GXnLNO+HCs87BytoqnnP0LfCM1RtjZW0VJ539r8vHEnr4p9+O39v/Cew7/hj87pn/jNWjOlDSQOTEoz8eGfaIpB1VkfivleTwBQ5i9bf4zHNtqqMbY21tbXZxx7wEn+yPXNWJsfbs2TNLzrpQY59duNDH/MKGkw4nH+4TCZATv1soqV7iGIPNvGssvhqbZY+cxItTruZFX63Gsb2UL53fLfZiTLYJz+cWmAwabsHKuU15VDBjPTM/8d/dSWF5nD36QCRpOSVGXjoYqQspnivygctn7q7cjN7w+e7i4G6X7xb/H9wPvOd73d2a8Qg48VrAy07tPt/ibsBjXwG85Y+A4Spwt4d0x5/xDuCHfxz49esD3/4KcM8rA+efA7zvvG574t2PAf72O13bq1y9u3P2nx8Cnnov4AOvBf70McD9nwL8zEOBp/8y8G8f2NxeBwB//XXggxcBK0PgjmsASjfemV8HnnCnbrzgdzwC3vrC7gLm7r/Rfa8VGI+7z6jAva7a8TceAR+4sJMXAM7+NvC/V4CH3mRRT0p//E/AD16/u2D93pmbx//+wPRO1zHAFa8MvOELy8dSevu3gQ/s73T8vvO6/0BiZGLkZQojD/kOV6114eqTJ2fm1Mn0apavSkvZrDxwEETy1ESpY3P7ONZSPhtIKxXch43PDqBJncGI9aIOEvzxOOFMXBXRc92xihKBVCuedP6/Y2VtFU8dfQrPGN4IB849Hy+95p0xGA5RBoPZq2k/ffK7MFhZmR6n8QcF7kUcZTDV1aTOLn4WiHi81+ufi3u+7tkYrAznzp909qkYrHbHhnvW8NTxp91IC3Sv1z0HP/+G53aylulctbtr9aTzPo7BygqevnJDPO/YW+NJ5358S2MGTTYmeNl17oH9Z52D5x9/G0zGG3jq+NMLiVT9je3jqiphW73VHHe62A84mcR4WunmtjG/e5A8YkYTYfyPOOKkzwsd9jn1Xa0IcUxzLLCuok0kdZZLEyvPzbrQeGJ96yKsZSNOsrx4dPmKqbWYVH7dIpl1znmMz+k8mg9DlwrQyi8DrYIR51nNc2wHxz+TA6ikrVFi5KWDkbwVPP7zdi4XG+4ZtYVcXDZxd9p481jHyOb3UuZxs1AbHiP+R7/JRMaUOeO3VkrpPtfa8RETlAJMpls9339Bd4frnlfeHKMMgBvcFrjvE4BT3gZc+8bdHadWfL/9W8DPHte1ucNqd0Hz/guAY68K/MPY91Gqk+75t5UhcK+rbF503XFPN947zwJ+5mjg/j8MvOlLWxszaLLRXRSOR8BdjurkeNd3Z7ZOjOwoMfLix8g+OuQ7XOp8PLGrDinDYWTuz8pkRwhn4DfEqVHU2ZknHZONqMbR3y7RW6kcPK5awbJygAcfCsAR6FxdYKdQ+QBgtP+i2ffRhfG5YPWovVi7wuVw0jmn4qRzTsUDP/rabuw9a7jh/e8+O36TB98bwz1rKIMBxvsPdHufZ2NN57hgPwBg9ai9KKVguG8P6mSC8UUHOr2Op4GyOsTGgXW87VeegOdc4Rb4xn9sPjg83LcXzzv2Vjhw7vlYv2A/aq0YXXgRxvsvwjL62199Iv7vOz/c9ZtUDPftAQrwou+7PZ65dpPZVsLZ3anJBOsX7Mdo/4HZGCvDlY6/9RHqeIyVPWsoKwP85W3uj1/7x7/AvuOPwWO//uHZ9kSXTNl/2B68j5svrLh/K0E7O3MCAbpEzBeAOpfzeU3KSsr3ZDLB+vr6XGJXX48tkXFMnznT/c7sy67qpvzwMbcQ5H6cA/gCV23TqhgyCLFOterPnzXOWU4dy/XhnBay8Xe3kOTvMZ76Sh+/jtcWkLJsuugGYKu5SVujxMhLByP1XCy2XZzzwpC3kW3J3y+6ADhwQTAAHKDt7Rsb3R2rlWG3DS9o/aLuAmRtb3dxsOeo6VjTvnuP2mxbK3Bgf3e
uN+v7dJZiJxSoyOgMkDEkx4o2ewCEBcj+9JZ8NmczUmh7dgAjJBF6kCnWQ6nY5t/ocDwTx2u11JMh20JNtxm+Xs0Wikv/qrv1KlUtF8PtfLly+12+1s74/ZbGbnxODL5bJub28Vx7Ha7bYymYx++OEH5XI51et1Y9FwYByC693c3NiSJuAGm8GGlQQ+jIsNNykYRt4Bc4d2ljECZCgwpRC1XC5rNptpvV6rVCpZNxuWv2EHYGuLxaIFkqurK6VSKWNmGUevCUbqApO3251qCADUer2u6XRqumRYhuPx1O53OBxeaLhhGZGA0Lno7u5Ov/jFL4wZ5vsENYJSuVw2lhEJBCBZLBb1+vVrG9M4jvXq1StNp1MLdt1u1xgzz55gz/gJtoyMwncbAkikU6BiM83dbqcvvvjiQk6EXINAQY0HNQfo6ak7ADxgSAlMSEW4brPZtG5ctVrNwGk2m9m9p1IpdTod89mbmxvVajXNZjNjuCmY5/y5XE7D4dDmErkUMhBkLchYkNP0ej1NJhP98MMPSqVSevXqlSVnSByIF5vNxgAf+ZRvndzv9w3IHx4eDASJLa1WyxjOKIo0n891dXWlRqOh1WplXbuYX+o8kC6k06dC8+l0qmazqXw+b53LkJ0gbYqiyPwBkMOOOQBH2FWSL88EY1uACD5J7Q1Ay9wBKh6QYf58YpscH34kGJlgZIKRCUYmGPnzx8j3Hekvv/zyxyHG/3D8y7/8y5ftdlupVMpaQOIAGLtnYGDCCJRoMjF2ltN5+4fNQk/r9ZcYOYV6gBL/xz4BaF09mB2PR1uSZvJZJqxUKppOpyabgA3c7U4dfl6+fHmxrDufzyVJnU5Ht7e3Fpzp1AQTBKBQiNrv9xVFkVqtlgX8crmsMAztvBgHXWwAPWQTaLRhljKZjAV1v1wahqEOh1N3KeQdvNHjoEEQ2IaK6/Va3W7XJCx8niVzAiPaXK8ZR0tMQgHTOZlMTD5Sr9dt+TqVStlmiyQC0ondRcM/m80uQG65XFpAfHh4MFbDL1lTu8ByPg5TqVS02+30n//5n5pOp/rVr35l9pfL5WwfC9gwmKP1em1MFsvf3O/xeNock+DARoqHw0HPnz+3+4rj047xALFPEghwy+XSwAiGC9uMokij0cg218QO0DBj9yQugCfnyuVyVryL3eKPJFjeJxqNhgWR0WhkNQIAXq1Wk3TeAwXmko0tCbYEKcZ0t9vZ+aifIAllA9AoilSpVPTXf/3XWi6Xxp4T4I7Ho/7t3/5Ng8FA3W7XCp/L5bJ1QqP9M/aKbT4+PpqvsyEqyZJ0qu3I5XJ6/fq17UfE+KfTaXW7Xd3e3qpUKunFixf2DIz3eDw2WRNsNmOAHQGKjB3F08w13wEYWfnwyRPxFnaPWgfADfadZInYCEvHOPJdr5sHUGCWpRO7Nx6P77/88sv/+5OA4xM6EoxMMDLByAQjE4z8NDBysVjoH/7hH/7PH8OCj1LDhWGm02nTneM8FLtJ5176LAsTgNCf87aI3ILACIjwpslgMkC+WBM2AsNgCZGl/TAMLZjwhkyQZt8EgCKXyymKIis8pjsMgWw6narX6+lwOBiTV6/XzYAxFNhImAo004PBwJb4WXIFtKrVqulsARMCHEu63W7XPlOv1zWbzUwmQtEnb+o8PwEQeQIyjdevX+t4PBq7M5vNDERHo5Et4RMsbm5urHAYvTrzSBF3JnPqYERNADvZH49HSxL4vi/oxpCLxeLFfiYYPEv3LPezzwqsDKzYuwXijUbD5iKdTuuXv/ylbm9vdXd3py+++ELr9dqYouFwaIkP7CJL+8zjfD635AHpzWw2s2JpHHs8HmswGJjt0s1osVhYUEAClM/nbal7Op0ay0WStdvtrEsUPoRGvNls2p4sdD1Cb0wnJxg9mHFJ5h905kFC4BMwfC0Igot9Zfb7vTqdjjF/URRZ62bqDDabU2vp5XJpkiXmC7+C/ZxMJioUTht3djodG38CKvfS6XT09ddfaz6fq1arWeA/HA42l7BTdEY7Ho8aDAbWNhdpA6w29RQUvQMaj4+PJrfh3vCf7fa0Dw0JNONJQrdarQxE8VUYeZIwWLXBYGDyED672+3s37DL0pmVg2HH1kjuSE6QSMHoSrI4il15aYm/lk/aWV3hHMnx448EIxOMTDAywcgEIxOM/GiSwkKhYJrhavW0CzZGC0ORy+VsiXS/39uSKHpiGC1ABQYKrSRG788L0MBc8PZdLBbN8ZFHsIR7OBzMKGD40JWjMZdObEWz2bSiy+l0qlKppNFopFqtpiiKzFkxlkqloru7Ow0GA9OWFotFY3bQ8rML/eFwKjz95ptvbJk6nz+3psXgkXHEcWwsF8ELYIOlK5fLmk6nF+wXrUDRIOPQBHE0zzgZgJLJZPT8+XPbDBN2r9FoWHcX2NNms6lSqaTxeGxgjEwDmch8PlcQBBqPx1qtVgZ0sDMsIx+PR3U6HdPmc1/YQz6fNwkGS9O5XM6SGZwMRhHn4dzlctmYlR9++EFXV1fGMnqGKZPJmKwAW4PVkE7sFXKWp6cnlctlFQoFvXr1St9+++2FphsWkDoKNNaTycQY2NFoZNp6DyIeFGGpK5WKsXHL5dKkL94nGS+YYKQSo9HIEjISKALOeDy+0M0DJH5fGj642nTVAAAgAElEQVS/252KSpvNpjFkgONut7MkajAYqFQqGasPIMPuEiNgK+mqNJlMLLD3+31VKhU9f/5cq9XKuills1lNp1Mbx5ubG6XTaevIRS1KGIZ68eKF1Yjc3NyYPcKO00kOwMjn82q1WtbpCWDgWQFkdP08N2NLknE8Hi3pkmQJ3d3dnUql0oUdsVJBYwSCfLF42tTVy6h8YwXiIbEykzl3UuJnrkGSzXl8QoONkdhg71wnOT78SDAywcgEIxOMTDDy54+R75Pd/+S28AyM14KzDIdTwijx/0gaYLcoqiMAMThoq9FdM7AsM1K8y1ssGvZKpXLBUuRyuQv2K45jK+bFyWEHMBAKHLfbrbFXtCWFrSuXy/ruu+8kydiaYrGoTqdje4PwzARAloM9s4JDRlFkgALDB5vJMi2Tjw53t9tpOByq0WjoeDzt9fD4+GiBj+D4+PhohZc4AHPV7/dtmXm/PxUw8n2Yjslkot/85jf2f5lMRvf398ZMPTw86M2bN3p8fLRgTnvS1Wql4XBozkZBLqCezWatqw57f1xfXxsrt1gsDKiKxdOGfm/fvtV4PNZyubT2wyQSJC2bzUb39/cWpBg/zjsYDGxp/9e//vUFkwaDCuuEFj6TyVh7Vhybnd8BoHK5bM+63W5VKpWsZgB9+36/V7vdNjBtNpuqVqsGNGEY6ubmRrlcTnd3d5pMJsbmUYAsSY+Pj5JkDDPXhH2DIfLFqD7IwqjToQomieV06hAmk4lJDPBPlvABdgIe491sNg3MKc4mSdztdpZM4KPYgAcrmF9YcQDmt7/9rYLg1AULhrBYLJp9MK+wdg8PD/p//+//qdvtGisXBKeOWWyoyn4sYRgqCAIDAnzshx9+0Gg0uuhShk2m02nd
3t6aNpygS3E9fl2tVu2ZgiCwfV2Gw6HK5bLJZVKplM0Pc8dzkhSSLKBLR8JCwkQba1Yj8A1sA/BhTjw4etaQGA8wJceHHwlGJhiZYGSCkQlG/vwx8n3HR3nhotMIgcKzLPv93pxOkr1xl0olW46DQUBXzIAQiPk+7BoPDLtBIRvBHOceDocXGtB0Om0dT2AWJJlGlmVDnAkHbTQaprcGMFlKfvbsmVarle7v7zUajYzhe/78ubrdrgqFgj3T999/b8/QbDYvJB/IApbLpR4eHnR3d2fFi8hLVquV6Z4xLJa9WVaXpLdv3xqjQoCBtcHQ9vu9FR8ioahUKhbkl8ulXr16Zc+K9OXp6UlxHCuKIg0GAwNKWITD4dTydDQaGUOKcTKGbMRHYvH555/r2bNn9iycC1a01+sZU8Gy8nQ6VbFYtIDb6XRs7mj7iyOSAOAYSDWwGVjWt2/fqtVqaTKZKJ/P23I39w9bxnzBekVRZHIdHFs6FY63Wi0DNa7rl8JJbpB2wCqS2MKMwnLV63WT6LCrPInwcrm0ZX8YF+Q6Dw8PWq/XGgwGdn3un2diyT8IAptvitO5NwJiq9Wy+cDPdrtz9y+eG2BYr9cajUaWAAHKMHmAWKVS0X6/V6PRUC6X02effaZU6lRA3+12Va/X9d///d/GZvtWxuzHg4yCehCYZ+ncPhYw/uKLLyw2kXAQuwACEtl+v6/pdGoxbjgc6vvvv9fXX39trbHZv2a73doqBDp/5CiMCVKJVqulTqcjScYsAtTcLzEAEIb99OCLTIrifUl2fWpTuK8gCCxRJaEgjkrnomCkb7B+nDc5PuxIMDLByAQjE4xMMPLnj5G8uP2x46O0hWepF0P0bNvxeLSlT252Pp8bwwbLQbCEtWOAJFlwY4IZHK+bJSAAaMgKKF7lbRe2qF6v24DCcvj2osgf2FxyNptdLCsidWADOZb/YR0IELwFE5QIgqlUysACZ81kMtbqs1AoKAxDYzg4J+CChno8Htv+CcViUY1GQ2EY6s2bN+p2uyZRSKVSF4GdcZvNZrq6ujKWCD1qtVrVmzdvNBgM9Dd/8zeKokiNRkP9ft+eA/kHLAJFpcwtwIF0YTAY6Pr62sZkt9tZjYIk0zfncjnrikRg3m63pldmXtm7IY5jmweciGBDsKJbEMwI11+tVspms7q6utJ3332nTqejer2u4/Foz0Vik81mL9hb6RSc0um0aX4JuKvVytjQ8XhshbowKXRRYqyZ3+PxaIkW0pp2u21doPhsoVCwJf4gCCywc+3VamVSFuYKliaOTxtgUqyOH8LqwE4RVNLptBXIEkT9Mn0qlTJpDmCOFh1JEnvvUAgOe0Wis1qtLFHkfhjrp6cn03v/8MMPVtROgTjPT8B9/fq1vvnmG/3t3/6tvvnmG2OEj8eTvrvT6ZgM5sWLF6rVanp6erLgDAg9Pj7qeDyq2+1quVzq5cuXarfbSqfTZpepVMqSxmazaawyKxS1Ws3uVZIVA5Pk+cQahvL29lb1et0kWaw6IJdBe+/ZU4q+CfjMDUkOcc6zcCQcfIc5ITYQ74hpyEWS48OOBCMTjEwwMsHIBCM/DYyEqPhjx0ep4cKZKBrjjRfHwdHQfQJA2expgz46oDBgFDvypouunDdynJ2HRXvp34xhHihwhWHg2jAbBETfsYZgydv8YrGwVqi53Kk9J4ZAAKE9J7uDM+EUkwKW6PdJXrLZU7cm9ONovsvl087fvMFHUXSx5Nztdu1eYUmy2awmk4mCINCzZ89MroGzAkQwTKVSySQKdHKirezt7a12u1M7VViXzWajTqdjwEQhZqFQuJAB+JoCpDTNZlO1Ws305pLU6/UsgOFszM9gMLioHSApKRaLqtVqtsxMNyE66zB3hULBCm494yvJWMt2u21L4tjVr3/9a/393/+9sbSPj49mxzBeJDG0Mubc6Ln5bKlU0nw+NwAjaeE7+AUBDPYTFg3NPfMnSe12W3d3d5JO7PWzZ89sE0lYXFgwvkMRO0H5+fPn+o//+A9L0ABvZBvs/wLbWyqVrGg9mz1tasl38UOuD0M+m81sPvBREjn/nN1u16RDzCcsNEnlfr83CVAcx8aYIymAoWdse72evv76axUKBV1dXanf718ETupNmF8SMnTnJF1IF2CFb25uVCqV9Pj4aHEslTpvfIisg7kmycU+iVMkMbTcZayxGWox1uu1JZ4k4J5BA8SxIVYg0K8z9sQowJaEmfhLbQCHZ4D52TddSI4PPxKMTDAywcgEIxOM/Plj5J/1hQsHQ5PsCx6RJvggmE6njX2CSWB5kN/FcWxMB7pXX9xGUMEoYY4AksViYcu8y+XSllWZkP3+1GcfWQWBjEmAedzv9zbJ/I6OL+v1efPCVqtl3yGo0TUJVhEJCcWNFBN3Oh1b5gawJpOJ7u7uTP+KpOHq6krH40lrSwFqvV43R0f3zfVms5lubm5M1oBUghaYMKoATDp93kF8tVrZ9abTqa6vr+3cOFmn07Ei0XT61AHm9evX5mAwC8gX0CCHYWhFw+jtYQ0BWBhU5lU6b8p5OBysSxEJwPF41GQyMYaYa3Nu5hBbg7nk2bGT4XCof//3f9ff/d3fWSChSBUmmOV7pA6SjB0GnLhfdNMAMCBFogKzXC6XbRyxdZ5/MploszltKsrzsrwNy4t9zedzbTYbSzZgoAgmPBMyErr+kGgwxmi9CcLMKX7ipT4c1Fdwb74WBUCTZHECcEBDT5CD6ZzNZibT2O/36vV6NpcwgiR1JGGLxUKtVkvr9do07J999plGo5GWy6XZKC2d37x5Ywwy++wwz8wB0phCoWCJL2wi84lsg+ci4BOE8bdarWY2xPOj10fTT9wEcBkT7NbX8lSrVS2XS2Nive8B3MQqkmHqW7y/IXfz/sf1Ye08SCXHjz8SjEwwMsHIBCOlBCM/dYz8KJJCjIs3Pem8JO/f4FlmpGCTz/i3QgaJg5asPARGTPCDSYFVqVar6vf71pmIzxP00FmWy2UzUu6PScKxfDcfimm5FnIM3uAx4NVqZfKEzWZjhaAU5G63W2WzWQ0GA223W7169cre3ne7nR4fH7Xf7619KROJvIOCwkzm1DKVLi43NzdKpVJqNBp6eHjQcDjUF198ofl8bm/1vm4AjTAshCT7eTAY6NmzZyZfOBwOGo/H2u12ms/n6na7ajabVqxdqVQM6MfjsQUugg/zBxMH49BoNIzRepeJYFkXR8UZPUOYSqVs/wiAWDqxWsw/duWXmZEyALCbzbk9LO1Nv/32W11dXVnbVp9UeMc7Ho/2rOyZgn3DHmLf2A1AB6MryQACG4elYfPGw+GgMAxt741Wq2UdfUhioijSV199pUajYWOJbAf/pM0xdgnzCtu0XC7Nlwh0fp64P5bQGQ//HGjlsS8SOVjYVCpliQ0tXGFnGc+npydLBmAzy+WyHh8fDXDRgTPvfk5evHihIAj0/fffK5VKqd1uW/JEMvbFF18ojmPrsEQBLqCHhCCTObVupsPTfr/XdDpVtVpVEAQWL5CoMA60wZZkUofFYmH
SDor6gyCw8xGr8CeeCx9FEiHJGHlsE/BDehPHp6YMy+XSVi7wIVhEDkAPG+T5SYxIFnme5PjxR4KRCUYmGJlgZIKRnwZGjsfj/xELfjJdicNjeCx7wmwwwLAxGCGTwu9wIN7mYSEkmWFjiDgvnYt460d3C+NBEOAtFUkE4Lff760QF2cjaGHUAASDu1qtrA0n8ofD4WD7PKCRhrXxGu5Wq2XBBJDlmQeDgd68eaMwDG0jvDg+dYqCASHIsuzZarV0c3NjXWZgjpBgMB48H+wRzwMQM19IF1qtlkkbCBi0v6WQOQjOBY3T6VT9fl+TycRkEdVq9UImw/wRNHyR5mKxsLoGDB6WkRayzLskCyywTgRB7hvmlPHCxrycBlaC7kokGqlUSs+ePTMdfhCc9tTAZgBigr1njBkrlsPpikOggc0jYYKdXa1WVqjrGcH/n70327HsOq61x+77vt/ZVCZZRYqUdWFBgC/PheG3kgEeWH6J8zT+b2zAsGRLBE2yWF1WNrvv+/6/2P4i56YpmmVSN6y1gAJZlZk715ozZoxYI0ZE4HxwkK4eG0Bj8CEp9ng8bt3GJJk9HA6PBeTIGdBtZzKZE1vAedzf3+v169cnmmX2krVn1ggOEm08z8jaJZNJm60CMzSbzU7YVb/fb4wwv4uCXr/fr1evXplEh6CEswaThz6eYK9UKqnb7Wq1WlnB7mQyMaYWOw8GgyZhcO+PIMqtGchkMprNZvriiy+sMxXO2mX1Oad+/2NXOs4vEq1A4NjpijOOr2O9KaQ/HA6WycAXcb6xK9aSbAaSC4AI0AfAAGHu12Vj+Tr+CUbPDfS964ddHkZ6GOlhpIeRHka+HxjpSg+/ff0kNVwYDkWBFBdy6NBvcpgpGMZxuG/JsHmwgPTux+GhqZRkiwEzst/vTdMLG8tMDEAul8uZXpg3ZUnm8F2GAjDEyZJqjkaP8x06nY5pktfrtbWCde+TTQqFji0yMXgYAdgEd9hct9tVNpuVJGu1i0whEAhY4SVv59fX16Yv73Q6Buo4IPbF7/dbVxn+DmMCuxoKhWxY5XA41GKxsA5IOAXWLplMqt/v20EB/GezmWnPAQUkJgAD6W6fz6dSqWS/HxYLx8egOpgQnHggcJx30m63zflhB5KMhSCFTlCw2WyMdQGsAHbp2JIWrfXDw4Oq1aoNo8RZcLgAEZe5woZhPRaL46BLdN5IA2BK2dvFYmG1FN8OYpBTMA9jv9/rzZs3evbsme7v71UsFo3FOT8/Vzqdtg5knL9cLqflcql0Oq35fG4aa7Toh8PBzhkBDADKjBcAibS5z+ezfeQ+OdN0GpvNZiZLgZGCjQLk0YRTgM0gVXwATGen01GhUFCj0dB2u1U6ndZoNFIoFFKpVDI7xMZ9vmOxLXNbGO54dnam/X6v4XCo8XhstkSwGQ6HT5w3bXX5+nQ6tba42CQzclarlQ0pBfSwV4Jm9jAYPNZO7Pd785vT6VS9Xu+k8xbBMD4Oto71wQdyP9gmPpmaH9Ye0ACQ+F4+g+9z5RaSrHuTd7375WGkh5EeRnoY6WHkzx8jeSH7rutHZ7j4cH45i+syZrwFwuSxKJIspe4yAnR7wSGTtoMN2u+PrUU5vNzHbDazwkBYvXQ6bW/IOBo+w2XrkDe4xYW8ObvtWdl0Cmdpg8rbP/IRN3W/2z12ToLpGI/HlqaezWZ69uyZYrGYdW8BdGBzXAaA+yctjzP0+XwqFouSjk6MTjmAlJvyJrXK2rHGAE48Hle73dZyuTTnH4lE1Ol09OLFC9NVA8gMk0ylUspms9aBZ7c7Dgd88uSJstmsGSzacpgEgAOJi5suJrXLQefQ+P1+mxDOoEycPSwOuniXzeIQRSIR1et1O2g45OFwaMwazFi73dZgMDDpAoAKYCYSCZVKJQNLCnNh8wAwVzZC/QSyHxgrv9+v0Whkkg/Ah89er9caDAa6u7vTarWyAutIJGK/T5IFEXQHogsVzmE6nVp9Ag4H4OF8xuNx2yPAE5ZvvV4rm82qVqtZoTgFtzhbpCsUEQ+HQ5M1IU9gLTebjQaDgcLhsLHMyCpWq5XdF0EP4AfIw5gTePp8PpMOTadT/elPf9Jms9HV1ZWKxaLJOhjYyiwfzhtsMYwtdkIHqVwup4uLC2PFJ5PJSYclbM6VNMD2E2gfDkfdN0xcr9ez8wvzRsCCHh4AYM9gXgEfAhF8GIEZvs71fa7EjYvADF9NpgO/6l3vdnkY6WGkh5EeRnoY+X5g5Pe9cP0kGS70xxx6CuZI8/EWSxoVZgVnAnMTDAZNNy7JwEl6dHiutleSHU7eOF3HTjqVtK5riGhQ3VQy6VKAhWGCOFQ0mn6/3wqKMTDkATAPgUDANpMiYNgYF1BJ4/K1ZDJpDAHdXGA20bNSvMp6YEDr9dpazrZaLc1mM9PiI5/4droVsIYdSKfTymaz/20yO4d3v9/r7du3isViOjs7s3WCsYANI9XMYEzWgRQ6TACHD3sBwAFkUsAAL8yW250K50gwQqobpwCTHAg8zivBWQD+FFTSajUQOLailR4HMaLzJlAALGEcYa0oTIZFXSwWOjs7M4CNRCLmeGCG2WNqAehERGE7BdBuEDEYDPTxxx+btpmCZHfeDA4DW8BeCoWCgTZOajweK5fLKZlMmozIlX5wv7Cr8Xj8hN3C8fA92CmAn8vljC2UZMEIPgD/AZuJnYbDYbVaLaXTabMb5Dc4cs4j526/P866Qc//wQcfWPvl/X5vASL7Pp/PzUFnMhl1u13TtRMQofVOpVJqt9sGmO6/w37C2GPHOHDOLP8G6x0IBOzfkVyxHm4Nh1vvgE1ixy5bD0AAPPg5V95G1mS3O+r82Q/uXToGX8g6/icw8a4/f3kY6WGkh5EeRnoY+fPHyO+7fvQLF6l2DNaVRsTj8ZNCWgybN1aYKRgoDJcFAIhwCrBa6HRhm1gk2AeXwej3+8pmsyYxcFk20pT7/V6j0cgWH2OXHluwUvjKm7DbeYVUOo4BY0bXDesCSDH4DceOHODs7MwMHBkHrBUGAVhz2HBwGBCHzDXQ4XCo9XqtTCZjzj4UChkQoLmFaeFnarWa9vvH7i/r9VrpdFpXV1dWKBsOh02KglaWA4rTcQulKWZ+eHhQrVYzFpYLo+cZMH73MAIyru2hM14sFlYczAyPSCSiSqVijBCHBafEIYEhAbxms5nVDJTLZQ0GA9sL2qLiVN16AwCQ4BS7dnXaLoDA1OTzeU2nU93c3Bjj6qbk+Uycm3voh8Oh2TgBDc+4Xq8N4Pg82EDkMsvlUqPRSOfn51qv1ybNwLmzh0gzXHkNLCPdu3jeTCZjRegU91Kj0ev1TubCYKuRSESz2cxqBjgnFBinUikr1AccaEkdCoXU6/XMFwBET58+VTAYVL1et6COugR8CZ91OBw14c1mU6vVSk+ePLHADlkJAeZut7OgKxwO21mECWevYNLQrrOXaMj3++OAztlspmg0qul0akENRb/YiBtkwQJyRmHYuAdJJ6ww60kwhF92/TMsLbaGZAbQdzMg3vXDLg8jPYz0MN
LDSA8j3w+M/L7rR79wwYJwqCUZGwALx0O4bUL5mqsrPhyOWmb34MB2oKd2Haf7tgm7BiOIcaBvZ4HYQBaY1Oq3Qck1jGAwaOwHP8fPSjr5bEnGHsFoTadTK2rE0SQSCTOGbreri4sLhcNh9ft9Y318Pp+xCqRFYfBYb9gDF0jQ7CIBIA3NvRUKBTtsSEvQiLsFkDBpGNlwOFQmk7FhkbT33e12Go/HZvg+n88OibuujUbDpr4D6KTAF4uFCoWC/H6/sVXYUzgctgODw8ERplIpY6/cw+LWMSQSCcXjcd3c3Fj7YKQFrEEikbA9px1oOBzW27dvtV6vValUVCgU1O/3TXvMAQsEAup2u7bfqVRKy+XSpBW0CMYGkN6wHwAee5XNZq2ugqAEUODMEbi8fPlSn376qYEkAzUBKZgdSVas3W637T4A8u12a0BCgAcQIKlA/uAGfsih0H7zu8fjsRXOJ5NJk0s8PDyYo0QOROqeICqZTJqzhlXK5/PmvAFNAjoAhsDNldxcXFxoNptZfUOv11OxWNSLFy90eXlp9sgFqGJvnEWCIs47Mqx+v2+a/X6/r8PhYPtPK1pXtw9ryH4B7m4wPBgMzIfiE5HMfFtuw+eylqFQyM4LwTIg6IIMhenu97MOBEz4VEDF9fHe9cMvDyM9jPQw0sNIDyPfD4x01+rb108iKZR04mRJ5eGMMMxA4DhALBw+du4ZDAZWGImEgQdkgfnDIsI8SbLP5oHR27IpvHXicF0WcLlcWuoeFo3UJYPVOJAY+m63UzabNYaQ4mMcO8YGI8hzrVYrO7ykYykU7Xa7BqA4JJehYT1YYxyeJDvsfA1ns9vtzKkcDgfVajVJR4fBQalWq9bJCC37mzdv5PMdC3QHg4ExQZVK5YQ943e/efNGhUJBi8VC7XZbyWRShUJB0WjUZAOwmTxfOHwcIBgIBDQYDHR5ealOp2MBAmtJjQAHkSBBeiwQXS6XyuVyVgAO4Lv63XA4rNFoZIdvs9mYBCSTyZgzoKVwMBhUNpvVbDZTOBzWxx9/bOtKgMNBhxGFnaELD5KMfr9v9sA9w2ziVNgvnhP74D4AcVd6gU1LMmkF600hOXYDqwXwAYTIGABU1h/W1+0UhV3TQQnmcb/fW/vYi4sLGwR6dnZmE+HRoiOXcYvqYeywEewROch0OpUk23fXEcfjcdPww8Jls9mTGUIMdf3mm290dXVlTBVSm91uZ7p6QHAymRgokSHA/qbTqfb7vTnxbrcrn+9Y1E0dSyQSse5x+CVaa/O9MIibzUbVatXOhd9/rEtIp9NKJBLq9/sWZCAxQk6BHAbWEGYPZhWfy+9FykLgia0RIGDjgBD3ji/HJ/1PDJ53/fnLw0gPIz2M9DDSw8ifN0Z+Hyn5k71wcZEuxcli1Lxhut/D2zlsC2n5zWZjm+gyaDgJ3sx5MFKHGCGHolAonLyFkqoMh8OWfnRZLxZwNBrZhsHEMQDSZfYmk4kCgcCJ/pZ0ZSAQOJF1cBjQp8Jk0gq31+spEAgYK4AmlAOM8cGKAozr9ePMFgCbA7/fP9YCkKYej8c6HA7WgQcjdjX2HK5AIGAsKwcRXavLGPJssCqAuvQIcovFcUgkxg4b0O/3Lf0Na8tnAp44T+4NJrPf78vv96tSqWgymdhhRzohHSUQd3d3KpVKpjFmPXGkFKn6/X4bVkmavlwum7Pb7XZ23ziPxWKh0Whk9h2LxdRsNtVsNm0+DjIiAg7a1LoMMBKV+XxuM0Ly+byBlvTI9K3XaxWLRRvqORqNlEwm1e127XMBPQpkmVUCew0zBYtaLBa1Wh1ntTBwE+CByYpEIsYosZaLxcLkP9hDo9HQxx9/bCwTgQDnnHPDvUoy22f96vW6SbAAYQYYss/sMcEhxfTlctnY6dFopOvraxWLRVtr/NJ+v7ehlgyMbDQa1iksn8+bDblyumazaZ8XDAatq1soFLLACF8XCoVMWjOZTOwcce+xWMxkMolEwoLc7fY4WyWXy2kymajf7xsjTt0Ea4AMAzvjjBCcE5Djb92aH2xru92arIuvAVjYIAG6d/24y8NIDyM9jPQw0sPInydGft8V+Oyzz773G/6n6x//8R8/y2QylpqHJcOJ8eaPobGIfC+AgB6VBcPR4+RJOfPGiTG4DBwHDYAKBoOmkSVlyqK6unnkGDgE7m0+n6tarapQKNim7PeP2lM2IRQKneiv+d2hUMjajNKVCGDj2TEcGDLezmG3kEYAqDgz1hUQdtupkvpPJBI6HA7WMYbDyRt5o9FQMpm0LkSwprRF5V4AQZjWYDCoUqlkQB+NRpXL5QywYVhSqdTJTJJ6vW6MHsXBMDiwCxwMvsa8EEnG+NBNCm0wBZoUMFMUPpvN1Ol0LKCJxWI2RBDGCl19MBjUaDSydr8cOFgngBr7RYLB5Hg6c5VKJY1GIwUCAXMEgcCxSJQaB7oK+f1+q22AJYtGj0MWb25ubL4KmnuceDwe12w203Q6VaVSUb/fVy6Xk9/vPxmoCUNTLBbV7XaVyWR0OBw0HA4Vj8f18PCgeDxuAdJmszHAwJZYB2pN1uu1tbJlnzKZjO1NOBxWt9s1BopggICCYuVUKmVBJTZE3YXP99ihiza3fr/fNOK9Xs/0+5w3WDECA+49n88rFotpuVyqXC6fAHAmk1G73dZ//ud/mj0iUYnFYif1GvgjbJcsAv+/2RybAazXa2PRuEdJJ75uvV4b40qns9FopHK5bF2ykKiEw2EbCBsMBu1Mw2wC2Pgmn8+nwWCg8XhsPpcgkgCdoARb3m63J1kOSQacBJoAyX9JYxqfffbZ//tRwPEeXR5GehjpYaSHkR5Gvh8YOZlM9Pd///f/97uw4CehK2E+ccZoq2E5cOAwCrzR8ne//3EaOmlnpAK8eTKfw73Q0gYCAUvtk+pNJpPmGNbrtXj+xbMAACAASURBVPr9vmmuccSkVgEf6XE2BUzDYnGcsfHw8GAsHm1l2XhJlt6czWbW8QmZAIeJTYJVQ3NfLpeNLUGfS1EgTJ6kk6JTQCkejxvj12q1JMk64wAOsAKZTMbui841dAUCkPP5vNUc4EgwSgwccIJBkmTgk0qltFgsjO2BuQCkASsOZjQatbaj9XrdZlkEg8fiTxwqQzpxljgg2FYOII6Q2R3McRkOh1oul2YDANdgMFClUrEWxNPpVMPhULPZzOaLJJNJSTKmZDgcml0TsMBcxuNxXVxc6ObmRtvt1rpZ8b273XFqO8/I1wksgsGgMUEEIsgy0um0yXmw9fl8rg8//NDOEXaCPAJ2tFwu63A4WIrf5/MZ63U4HDQej1Uul624d71eG/vY6/UM6HD+1B3M53Pl83kDR+QeBDOcDexOkp3lcDhs5wF72G63qlarkqR///d/V61WM5YzGj3O9snn8+Zoe72e9vu9crmc2cNgMDBgdWUyAAUBz2azUa1WU7vdNu21K0lxAZYzDKPd6/WUzWa1WBxnw1APUSwW7dzD7lLwDKtNMBmJHAeiutKuXC5nUgt8TDAYtEwE/oZuXDQGkB47gtG5DYAnCOd8u
tIkGF3YePw5Ugn2RzoFRO96t8vDSA8jPYz0MNLDyJ8/Rn7f9ZPM4XJT6dIjM8LbLXpxWC3e2mHmUqnUSeFcInGckA5bJMk+jxQfg/ZIndIhBkYBx4uUAXkBCyPJZpK49xWNRu2Nm9/Fxvv9x3kak8lEs9nMWuKSvq9Wq/Z70bPifChk5SDTAYf7mU6nGo1G5uAAUnSp/H5ACBkFn8cBicWO09TPzs6USqVUKpVUrVaVy+UUiRznt8A20A2IVOt4PFav1zPgGAwGxizwPMgU+v2+FVtziF12kWJISScHCpYJ/Tup/2QyaYcYQIHtASww7vF4bMyb6+zcYmkYZHTdyFM46ADkeDw2/S37zfyL/X5vXydQ2u/3toawaTA2rL1rX/P53PTxsEB0wcJ2CYDo/IU0p9VqGbBxj7BgzWbTHA3MEmzWarWyblLz+Vw3NzcGKG5A5Z6pw+FwUjy+Xq9Nh57JZOxnYHJLpZKy2ayKxaJCoZAajYY59fV6rUajYZIImPFKpaKLiwvbEwq/eaZUKmWB2mAwUKFQUKlUsu5ShULBirUnk4nJNMgYwNLW63VjY5HipNNpq89AhoJfuLy8NHkGzBvsJC8ZAOhqtTJQwB7q9boFdDwbARId27gPSeaf6BYFy8vnwrwVi0ULoJAAsWcUlBN0TCYTkxu5kin0/9KjrISuW9w/QOPKxjgrh8PBfhd24l3vdnkY6WGkh5EeRnoY+X5g5PddP1pS+Lvf/c7kEjw4Tps3XjbFTcv5fMcuPLw18pa9XC5NU45T9vl89ibNwUJbC2uDkfOzwWDQev0HAqdT7SmIJCXPv8diMQMdn89nTpvvAdhcrSZvxiw6hwSNKqlc2niig8bxoiNfr9dKJpPm9NA9Azowdjha7h2nMZlMlMlkFIlEbMgfTpM06WQysQnrrjOGFUBLv1gsbB9jsZgNH4RNdB0qbJ5b1ApzAMOQTqfV7/dVKBQsJU1wwfqgjeVQwLDBEgFGbqoZcB4MBvL5fMrn8ydMmmv8OFr2FVaPguFcLmdtYl+/fm3rRPEwAUsoFLK9Z8Ah6zQajdRsNhUKhfTq1St9/PHHVljMfsPWSjJnA9NGEIaTaDabxkJGo8d2z3RyQmKRTCbNNpDurFYr6+IDc/bmzRtjZkmhu8xlqVQ6GR7oBkUw65zXSCSiVCqlwWBgAQDMDoHiYrFQvV7Xcrk0UGO/2Nv1em11GaxJLpfTH/7wB5XLZVUqFb1588ZYNr/ff1LH0Wg0Ts4NGQFJBjbUbdBi94svvlC9XjdHTcczghAkW3SvglkF9JD8jMdjxeNx6zDV6/UUjUZVLBY1HA5PuiMh5RiNRhacAt6wpwTdi8VCs9lMpVJJsVhM/X5f+/3ewAawgOHjrLjrCsu8Xq/NnwKU0mOwjx+JxWL2d4IL7nG3250UFXuSwne/PIz0MNLDSA8jPYx8PzByNpv9WUnhT/LClc/n7RDAfgEu3Bh6TtLMOGhXawyTxeLAIuH4ACmcAJIGNpWUNFpOOt9IMiZnsViYU2SRYFVczelmszFdOhvnDnpjIjxvwRgFn03HJobtcZBwwDBx/C4OwX5/7PBye3urVCqlcrlsG1sqlaytKg7M5/OpUCjYYUfzulqt7A/SiW63a44EPTSsItrf1WqlTqdjaV0OJqCO8dIpikJFijCRgLBPrA/24fP5TA6DHIC9Yr9gQ9HcM5F+PB5bmrvb7ZpcJpFImN45nU7bWuM4qU84HA7W8haHwYGMx+PmYAD4aPQ4t4Hvw44BApx8oVAwaUK1WlW73dZmsznpRFWr1RSNRjUej60T0X6/t/WrVCqm+Q8GgyqXy1oul1Z8SoEqDC1McbVaNXmSe77Yq3Q6rVwupxcvXugPf/iDOSm0/LC0yJw4w1xondkbJA90nbq/v7fgiz2inoB/C4fDajabur29tc8KBoMn9SwAPLUCMKKpVMq02NhWNBpVrVZTNpu1s7RYLE5Y58lkolgspkwmozdv3qjX6+np06eKRqNWJDyfz0260e12TV8uHSVHzIKBmd5sNqbN9/l8enh4sCGvyElgEQF8/IKr+8dvIItCe//27Vs7W6FQyAZHTqdT9Xo9qyEheHCf2917/CfBGAAvydheAmOCNQJx7h9QJSMDUIVCIfX7fe+F6x0uDyM9jPQw0sNIDyPfD4yczWb67W9/+5d54fqHf/iHz2irib4zGo1aqjUcDls3FB4QYwJUWq2WstmsstmsGR9Oxe0YAjvkSjD4OynjQCBg3Z/QBdO5xa1BcGUVbHY6nf5vBZvodBlYSNcYmEO00hwKSaYxPRwOJ4xJOHxs9xqNRk90oYFAwNLykiwNXy6XrRVrPp+3AkuAFXZvv9/bYEHWHX03xoTTZHjjer02/ar0qK9HwxoOhy3dXyqVjI3DGe73e/V6Peu8tFgsLC3LZ2LAHDb0761WS71eT7/85S/tcBAw4BTdYIKDidOXjuzW+fm5Pv/8c83nc33wwQcnaW1YEA47em4Oi8/ns6/v93u1Wi1z0JFIRN1uV5JULBYtxUwqH2aZlDlBRyAQMB1xvV7Xf/zHf1jBqM/nU7lctj13Z9/EYjGTZlDgyqDCQqFgrVthYfr9vvL5vLHYpMVJybvdpUKhY2vfUCikt2/fGtu0Wq2MSeUMoeNnL5H1UOCNc4FppWMTbY9LpZIxXkiB0KAD7ofDQQ8PD5bSn8/nVnQuSV9++aWKxaI+//xz0/szNLHb7do6wzA/PDwYeAJeblvoeDyuL7/8UtfX1woGg0qn0zo7O7PPogHBkydPdH9/r4eHB5VKJWUyGQuC1uu1Xr58aQHybrczCdF6vVYul1Or1bIaDzpo4eQBYlhApBiAcL/fN9lEIBAwWUO327XZONgJgSTgxnkLBAJWAwDQEGSFQiELwmBn2RNXmw5Th33ze/gd+NXBYOC9cL3D5WGkh5EeRnoY6WHk+4GRo9HoL5fhogMTkgYeEsPBgVO45nbdcVN9GAtv50gwWGTYqlwuZ738t9utTdeGLWJxwuFju0YOM06GhWPIG2wab+gwb65ml03HAWNkOC8KV/l8nDUbiBFFIhE9efJE+/3eJrJT3NjpdLRcLg0A3QGYgC8OBTBD2sEwOdYcZwETRCrZbYO72+0sFQ+TA3OZz+etw896fWyx63ZhwlGEQiFzsIAVsgIKmpn1wFT6WOzY0jSXy1lhMEPnXKaUe3YlE/v9Xslk0pwbbBFzKyQZgKJjJiCQZN8HoPZ6PW02x9ayo9FIb9++Va1WM8aOegWfz3cSvMDK7nY7PTw8mDOFsfv666+N3Ws2myZloKDXnYvi6o5hVAks+v2+drudDdFstVrabDZWbIs0BufD3A303KFQ6GSIJRpvSVZrsFqtlEqlVK/X9fLlS0kyhtcNDmFvYBJns5nu7+9NHpROp5XJZNRsNg0YACAKfWHLYA2z2ayBJ2xuNBrV69evVSwW9fLlSxWLRZMowHQj61kul7q7u1MmkzG5y3a7tcJ6Oj0Fg0GdnZ1pOBzqxYsXOj8/N1kOz4+zh211g9DZbKblcqlsNmsSB0B1
MBjY+eEPzjgQCFjgC5MOK83zwgb3ej1jwe/u7tTv9/Xhhx/q888/V6vVskCE80BTA5w9gQa+NBAI2JmDwYc5hLkDgF2J2Lf/jk8j0AsGg+r1et4L1ztcHkZ6GCl5GOlhpIeR7wNGTiaTv1yG63e/+91nzNbg4NFtR5K9IbspPEmmW0WvyYMEg0ErWqR4D6YMQPD5fBqPxxqNRvY7WCicP6l6jESSgZfbtYRFprMNxsLXfD6fFdC5AyBJ81PEy7/DXHKfk8lEy+VSlUrFGDy//1gQO51OVSqVFIlE9PDwYIYGSxAOh61IlEMCAFIgSxvS5fLY0pPPxyGSDoZB5TCGQiHlcjlVKhWtVitLXaP3HY1GarfbxqQiieHtfjqdqlgsmqaW9eWe6MQkSaVSSZ1Ox/TWFxcXqtfrurm5MZYDJgzjJdhYLBambXZlKICNz+fTaDSyVDc2grOYTCa2l5FIxA48axaJRFQoFNTv9y0gAPw5eKT/W62WcrmcRqORDdwDQJn7Qi0AUpDlcml7TLtSvhfwgPXkGVOplM7OzhQIBHRzc6Ovv/5aV1dXJr358ssvlUgkVCgU9Ic//EFPnz41lok2yRRhz2YzDQYD+2ykO9Pp1Fgntzh2s9kY8FATAtsZCBxbyhJIUHuAjXQ6HQt8ttutBZKcDwaYck7C4ePgxkgkYgwdNSqr1UpnZ2em7ae+A5CHFYOJ6vf7xpLiL9Dmw1D2ej2TJ1xcXNizTSYTdTodCygBCpeJLJfLqlarJ8FeKpUyNhlwj0QiVkTf7/fV6/UMSNiLVqtlv4OaDVh2AiokP69fv1Y+n7cuV7CqBMWsMcEqjh9/RDZkuVyesHK0+PX5fP+NuePzCRDxg3zdy3C92+VhpIeRHkZ6GOlh5PuBkX9RSSEFwbxZoivlDRA2D3YNwGABWBS/329v0xTQdTodY4pgeEhVYoTozFlYJpAjRyAdz2JLsunhqVTK3vK5V4yDA0WHJpy3e+hJb8MWhEIh+/9AIGAs2cXFhXK5nG1oJBIxoEwkEtZxBUfHJruFthzO1WplrXUZcIcGHJ0+MgrYUNafN3nWAWBCksLvhU0CiBl4Rwce9oeuTuwlHYMAREmqVqtmkN1uV9PpVM+ePZPP59OLFy/UbDaNsYIBdhm37XarwWCgXq+n0WikbDarRCKh0WhkBbZ0F6pWq1osFqZnRnYQDodtoCRFuJLUaDQkSefn55JkuvmzszP1ej0dDgedn5/bzJB+v69sNmugiiOVZG1x3YLadrttRcPn5+d2rzCOfr/fugjhjKLRqNLptG5vb61+4nA4tqRdLBY6Pz9XrVaz9clmsxbUDIdD6zo0m8306tUr0+sjU6CYF3kGUpder6dUKmW2FY1GjSmFHcdGYZ9wfsykoT30ZDI5kUxJRwkRjPxyuTSGqVAomGSKYCeTySibzUqSzeBwnR6Odr1em6yHFsPVatWY8/+Sv0mSZQey2awqlYqKxaIx5ovFwiQiw+HQdODcF76Gc4LMQDqVBlG0i69CzsL3EvDsdjsb8Ih/JNPR7XaVSCSsk9V+v7eOUjh1np36EgJpnpkzBCC4ungkUuyd9DjkknPv3rvL1LOX3gvXu10eRnoY6WGkh5EeRr4fGDkej/9yc7hYBN4Q3ZQboMG/SY9MGywRbAfOjnS33+9XOp02Haz0WEy62WwMsNhYd4MpeuOQ4jTpXsTvgwVAMkERIW/nq9VK4/HYhrOVy2U73LPZTLVaTb1ez/TQrgHsdscWsZPJRJVKRalU6gQckH58W69cq9V0f39vwwqlRz00mnfSmJPJxCQRw+FQ/X7fhlgOh0Mz9NFoZIWWOGD2AHYFNmK9XptjSCaTymazVuRZKpVOmL7FYmGzW0ajkRXODodD+12NRsOYUaQm/X5f0WjUnARyGwqa5/O5Xrx4oUAgoGw2a7IOtNir1UrZbNbSxu76sJ7ZbFb7/V7lclmbzbGNK+nuw+GgbDarjz76SM1mU69fv7ZJ6KTXU6mUscOr1UqxWMwKeWGp6T6EXAD7415x6jDKOOm3b99aZy+kRQQhAMNutzN5Bd2IYH+wM3Td33zzjbXRpRsRuvxOp6N8Pm8FrrCGu91OzWZT2WxWgUDA6hxgeQlWKCiWHougGcCKDAI5SSaT0WQysZbVBF+DwcDYK5/vWMBODYTf71ez2dTh8Nj9B5Z9Op2ajGM8Hutv/uZvrNV1Npu1ItlYLKanT59KOspA0IXjJ/AH0hFUaM8Lw0mwNZ1OT4pqcabz+dxAKRqNGvjudjvT/nOO8HO0CeZ53Ja++DCYd5gzPn+73ardbhugwyjThQw5C0Gmu24EisiJCHD5DLIoBPCSrG2zKwEDUAjUkePw+d71wy8PIz2M9DDSw0gPI98PjMSnfNf1o1+4WDzAAwPB4fHWiPY7GAyeTBanEBTmjFQ2hxejkGSMRSgUsm4nOFe3uBanO5/PzYHAYh0OB5MQcJA51DjRXC5nb/6ka3k+mKVQKGSOjNQyLWV5W57NZiesByyHJGMGs9msFZ/W63VLUZNW5llh2Ugrw6LwrOVyWavVSr1eT9vt1liOXq+n8XhshcnRaNQAwu/3mwNMpVIGojgNnPhqdZx/USwWtdlsTF8+Go00Ho9VrVZNb81cEZ/PZ/MxAA708be3t7q8vFS1WlUqldLt7a3NgKAAG1uKx4/T3svlst0j68D+0OqU/eQwxeNxDYdDDQYDpdNp05DXajW1Wi3bz7u7O7Oth4cHXV9fW+DBnsJ2TCYTY/AIiObzuYbDoTFp3W5Xq9VK1WpVh8NjVy5kOX6/X5VKxQ4tz9NoNMwGALVgMKibmxsVi0WdnZ3p5ubG2CvWloGPBCQ4zlQqZewg5wEJAA6UgAzwhf2eTCZmuwQMkmygJ06K8wmoYNv39/fKZDJmo67+HDac9UbWBHvHui4Wx1kqtVrNpB20R45Go8rlciaXwaaxP5fRpwgXQIUNzefzVscBiw8I0iIZW+QeX716ZYXFfv+xeP7+/t46SEkyWQy+yGX6XH+Cb4zFYhoOh5KO0iKY93a7bUw0jCcAiOzMlai5gbff/zgzxJVA4G/5msvaucE560dAQCDH373rh18eRnoY6WGkh5EeRr4fGMm+fdf1k2S4uHEeFu2qJHsIWDIMGPkCqViMkf/n7Vl6nAqPTpyHZzEp/OTzOVQUVsIqzGYzm5dAKpffxRsxz0MaFcOAaeQtezqdWjFkqVRSMplUs9m0rkQwfRjB4XCwAYpIJrbbrUk/AEd0smhqeeuGKVmvj/NN+H0cXjSnACfpaAp63cPu9/utgw8yFveNPRQKqVKp2Bs77A1MJWsXiUR0eXlp4IfGeb/fq9PpGANL0evz58+VTqfV7XbtIMP4NRoNdTodzedzFQoFA3Qc/suXL3V5eWlgHYlENBgMrBibgkraoEYiETUaDU2nU2OKYCNYy0QioSdPniifz6vdbuvZs2dqt9uWskZn/e26BmQ/sL8wScgfYEUpIMfeYWbcWgA6PmHfsG2Hw0GNRsOc7zfffKPLy0uTruCEr6+vVS6XDUQ4H/w
/mm7qCG5ubvSLX/zCag7c9P96fRyWisPmvMXjcbMxHBHzLmDPXJ0+afxisSifz6darXbipJAmwDBx1tmf5fI4H6NSqeiLL75QuVxWvV5Xp9MxnTrr7kqn6GzEJHsAnCGVtLAlyEQy4TK6i8XCdPR+v1/ZbFapVErL5dJYZ/xHvV7XbDazM3Bzc6NarWb+ARABqKbTqQEIATPntVwum3wsGo1aC2j8HAA8n89PwAlbxP8hvYKJJfhysxYE1pIsC3I4HOwPzCCfD8MIi+dd73Z5GOlhpIeRHkZ6GPl+YCRS4e+6/D8WTHBQOEDeIiXZwaXb0XQ6tYJOFgAWje4kgA9s32azMeeAVIEHZVHcNB9FkDBY/A6+nwUkvQ1DwlstrJvbIpXiYxiodrt9Ag6wN7wJ+/3HdpP5fN403DAOhULBDIpDj3OXdNJKczKZ2BR2ihnRhZPedtnT0WikVqtla49mvl6vm+OjGBWDc50rBx4HzcEGZEhh03Upk8lYS83pdGpD/qgNoIsUjvTq6spmO8AEwGK22+0Th4vjGA6HmkwmqtVqms/n+vrrr3V7e6tAIKDz83NFIhHTFTOLASC9vb1VMplUvV6XdGRAYG9g8pDrwFRmMhljBUmVD4dDrVYra+/77WdjWCdF2QDwaDTS+fm5MpmMdSPiMMPiwTqHw2EbXHhzc2Np8t1udzIbg+BmOp2a5p39wY7pzgUDutlsNBwOdX5+Lor3CfAIjpArULMBexUKhUyLjg0RcKB9B/BZ016vp6urK2PWGBbZ6/WsqB5bonDWZRNZYxjRTqejbrdrRazJZNICK+oVKM6dTCamA5/P59bFCgkWa//VV19pOBxaUINenGAN1oogZD6fazKZWKADSCHNSSQSBmIAMI6ZQBrgm8/n6nQ66nQ6VrAOu0lxM+fO9U3IiSgCJhjkD5/j8z22KuYPHdVYY+wQ+wFQJP23r7msIxIu7/rhl4eRHkZ6GOlhpIeR7wdG/kUlhYHA43R5nPy3b0ySpTE5PK7Gl8XnzZAbxuhg2TgEvOnztgsIsNB8nYVArgBjttlsTqQZq9XK2k/CDG63W9NGk/pfLBYqFAp6eHgwJoCiSdgzDpyrJeWAk3KHqeFCq9xsNi0tzRrx9syGw6DALPAWztdoU0u6HZAulUrWcYg3epwCoAqgUFSLfAI2BXCXHp2P9Ng6EyDn/nkuHEAoFFK/37c9yGQyur6+1osXL6zzDh1vCA7evHmj7XarTz75RP1+X51OR1dXV2o0GuYYer2ems2mPvzwQ9OBMzUd5gH2AyYFeQnabLoA0YIYhkZ6TD27xZ3fZqeRUpTLZQMXn89nbVjRM8Pe4Sxms5mtLfcmSS9fvtTFxYW63a6CwaDy+bzu7+9Vr9e12+10fX1tQEo7Yp6VegcKc9HeZzIZFQoF228cHufGDa6wBRh0V6tN+h3ngh11Oh0bVBiLxUyWQdH6aDQ6kS7hD7h3LkDJrSHpdDoKBI4FxMhyYOoYdgkDR6taV150OBzbDz958sSCwLu7OxUKBR0OBwsGAFAkFfgkpB4UNRMgYss+37E98GQysQ5y7DFsOhIw7IWfG4/H5kc5f/gNF/ABbRh3GFDAzw20XcDgfgEdAI7Al4sgH/t2bYM9RsblXT/88jDSw0gPIz2M9DDy/cBIvv5d108iKWTB3JvnzZOb3G63xgLgYLh5ug7BJEmyVO5ut7OiPRyMqxVn0WANASbecH0+3wlIuRuBQePYADo2KZFIWLobZkM6OhdmO9AuFaAC+CjAhUmIx+O6vr62YXKkiHFarpMG/AhueBa6+fC8pERhOHFedAyClbq/v1etVrPvlWSOlD+hUOhktgrspOtsw+GwMW8ArKSTYXakzLfbY0cndLwwEzhx2EValdbrde33e93f39tes78ffPCB6appmdtoNNRoNOygVCoV1Wo1rVYrY4XOz881m83U7XZVLpfNkR0OB5NdwKphp6PRyBw+TDN6YuyWQmECF5zCcnmcel8qlbTZbOy/HEAKt5FH+P1+s3k+OxQK6dmzZ3p4eLA2vMzXePXqlYrFopLJpMrlsn0WLC6dkXDEBDvBYFCNRsP2DsANh8O6vLw0xnU+n5v9up13ABg3gHBlT8gmMpmMnT++B3a33++b1hspEEES5w7pDCwybBP7hZTDDQ5gfOkutlgslM1mbU8pmn54eLDWv+fn58rn8xqPx4pEIgYotLN1pVdci8XCno92xuzpeDy29dlut7q7u1OpVDrR20vHNtkEoXw2YNDr9ez8EIi7TCnrwl678ofdbmd1L9QLYE+cUxhRlzlm//CPfKabkQHIpMfOc971bpeHkR5GehjpYaSHke8HRv5FX7gwuPV6bSk2HpgHBTBYFDdFhyMDgEgJxuNxax1JURw/z0a4hu0WXR4Ox+JQCn/d3+dKDNB/7nbHTiocbj4fjSmsIUxWqVSy1DdvygCDJLuHyWSiXq+nUOg4/BBw4fcDlv1+X4VCQel02hiOSCRielZS5/wuDhCFyBQ74kQikYgViOLwBoOB6vW6GS+F2y57Nx6PrdUr97rZbKxFKkWyHBIA1WUOCoWCIpGI7u/vdTgc9MEHHygaPQ6/4+0fyUY0GlW329X19bUGg4H6/b4+/fRT3dzc2JDAXC6nq6srjUYj07tPJhN99NFHdvjPz88tVQ2IlctlY15Z52QyeZJaxmGMx+MT9hbmFYaYWSGz2cwCTr7utl5mDQmElsuler2eyQoYFshBRuMO8+sWXdbrdd3e3kqSfS2TyZjuGwZ6v9+r2+2azGE8HluraNL4MMU4fuoIKCjGwbF23Nu3zwyBGJ+BVpv1yOVypoXH+VAjst0e60Skx6JiGCqXLZQeZwFJMkeHs8Zm0cDjGJfLpTlNv99vXYVWq5UGg4F8Pp/q9brdMwCJPAK5CSw7No0zpbUuMhpJ1oEL34XPQZ4SCoVOWstS50FdAr4Pto4Algv/yDNSOOxKXPB/+EU32Oasur4appjvcf0Y98bnuoCFjbvg4l0/7PIw0sNIDyM9jPQw8v3ASL72XdePfuHioWHupMcWrTwcN8T38He02XQUIfUM20faFCMjDT2fz+3zDoeD9eSn3SULSmtOCl5h/GazmdLptAGW9GispMrd3++mlg+Hg2lyWWy+Dovhyge+fSDpysRbOcbJZnE/3DtO2nVGGBMOZLc7FmNT+MphgSV5o2NRkwAAIABJREFU8uSJer2eteRcLBb2cxjScrk08AA80RzDbLKvGC7TxSlsdBlY5B6kmJfLpfb7Y0Hj5eWl3QssCKBWKpXMwcK8AIB0NGJQ5vX1tTqdjvb7vQaDge7u7owhvry8tPs8HA7WCcfVWA+HQ0uDE9Dw3/V6rUwmo263a3vI53DwDoeDyU4AfXTI2WxW/X7f7I0gizayMCs4bp/PZ3NnRqOROp2OMV+z2cy6LDUaDeXzeQukaAGLPAVpDqwprVYzmYyxktjeeDxWu90+SaEnEglj8Xhu6kewe86Ly4xzrjm/q9VK+Xxes9nMWuvyGfw+tz5ltzvqqJE+hELHQmmKvy
nKxTlvNhsLBmGggsGgEomE1XZQg0L7Zkk2WR5Qor6EmTWsD8EbAOz3+63OBvBBguIGp4fDQaVSyYIp5B3ou9Go53I5zWYz9ft9AxdAHzkJNrjbPWr26Y4GsPL9wWDwpOUujD5r48pc+HzACr/s1gS5wQ7BpQvy3vVul4eRHkZ6GOlhpIeRP3+M/L4Xrp+kaQbpc36pCyau7pV/w0hxIuiwXQNbrVZWHMtDUYQZiUROBs4FAscBi0gTeGjSlBwwukPxNVeDzffhqCkyxMgXi4UVvMJY0AmGDi4YDAdGkkk3MN58Pm9v0RgR8zIwVheAWT+Mh/UF1NLptKW2/X6/sTHSo6ZYkrEAGBegg9TBZUFYP2ZVkHqnuBJnz88BjqxltVrVJ598Yp13AFsONxrsxWKhfD5vDpbBi2dnZ0qn08aIUTQJ45HJZNRsNs3pz+dzYx1hn9wCSFg6nCLdtfgvXWsIBEiLu8ELgcN2u9VkMjGgdlPQh8OxoHg4HBqz6fP5NJ/PTZLjpqM5P36/3+of2Nv5fG6tc7Eh7qvT6ZywXtFo1IqWfb7jDA9kFhRbj0YjC94IjLC/4XBoBdXdbtdmgYTDYTs3PMNms7GAaLM5dhNbr9caj8f2ecFgUP1+X4fDQfl83p4RLT7PA0iwhtg6Dg4ZB22bYbgBUIIXHF46nTaGC8Y/n8+bXWHj2BIBFMFJv983uQxnJRgMWrE7basBUQICzqokkxDhA1xWkjOcz+dtzabTqcmKaETA7+RzCShd58/vxbaRIyF/4txJj3U++A8CcUDTDaDwIXTdIhNBcIzv864ffnkY6WGkh5EeRnoY+X5gpJsx+/b1o1+4cFSk6qXH9owsursgHFj0k0gDeCPFWboaTkm2caSvSSHDqPD70WaS+uYeSJmzMPwONgFHDsiRauSNdzqdWktWuqhIj+lFDurhcLCOMwDRbDbTmzdvNBwOlclkbNL5ZDKxjdztdrYWbDjPwO/huTabjbXwxQmh/XXTocglkALwu7jPeDxuRbOk0GEt0frC9sGI8JzSkT2kGxTrBGNHETAMzHq9NueNTpxhfofDwdLq2AVsKLIAQBpnCVNVKpXsPhi+ibwBuQl2Apiw14FAQMlkUrFYzBzAdntsQzwajWwtCWrcQMINVlhXnO1kMlGz2VQkErECaAqzGfRIe2QONCz1fD7XcrnUxcWFza/Y74+af7/fr2q1qs1mo06nYxKKZDJpw1A3m8f2w9j+V199ZUHWfn8sssX5LxYLtVotm8XCM/R6PXteHCuShMFgoMlkom63a1KE9fo40f7u7k7hcFjD4VCvXr3S4XAs0nVb8JZKJaXTae33e9Ns84yLxUKTyUSDwcDWAseLbTSbTd3c3OjFixe2fjhlGPDtdmsypGq1av+OPfN3BnniT4LBY9E3MgtJxnbSNjibzSqfz6tYLJ5IEPi9DPLEfrE5VybSarXMJnK5nHa7nXVZw4fhVwnYCRwY/ol/AyQp3gVIONOxWMxAG38Mg8q9Exzy71wE1zDU7te864dfHkZ6GOlhpIeRHkb+/DHy+66ftGmGJEu1cfFgbCJ/B2B4CN7e0b26b72ACs6fwXNoo2EvYHtI7QUCj0WGvJnSdhdGzE3vspD8G+yguzF0MUK7itbblTZst9sTxjAYPA76Y/4IzoWDzTpReAvrBCPEs+CwATDu1y1qvb+/N2032mkYIP7ARjCPA2YSmcV4PDaWBNYUAIbBIl0NCwIj4xY4h8Nh9Xo9xWIxAzba1br6YQYjTqdTAzqAnEMsye6HQk7sDI2tJCtqLpfL9jthLVz2A7sixQwbw765+mgOEoEOrJxr43RPglUdj8d6eHhQPB5Xq9VSLpc7Odw4bfTmpLulx7oLHE2n09F2u1W9XjfbiUQimkwmBqqu4zkcjgNJS6WSdXByuy3lcjkbIImMiD+JREL9fl+z2cykKzz/fD63YAdpEE4Odpbi61gspru7OzWbTT19+tQkCdhNNps1x8fP45ilR/YLlhmHeHd3p263ax2TaFVL8Ob3H7sdwYLhA2AcOaPMFpFks0Ww78PhOGhxv99bK1pJ9juy2az5OreOAbkJLZNTqZT9DjejwUwhv99vbZSxA+yJgnS35sS1YVhOAI/7hW0jWEJvD0Bi12jNARP2AjkQ5x121v1/73q3y8NIDyM9jPQw0sPI9wMjv09S+L0vXD6f7/9I+tPhcBj8ue/hQPKAaCxd3aObIkbvyaGBMeNGObCk8Uml8kaMk4eJwxlyoDFIipR9Pp8V8sFukWaG0eD72DgKF11nn0qlTjrycMDYPADH1a/T8Yj7brVaur6+1m6302AwMCDudrs2IG46nSqbzSoSiRjbBSCRiqa1Js5Ekrrdrtbrteld0TqTTuWzLy8v7S0dmcV8Pj9JoTO1nM/CsAqFgtrttjlBNOlIjWD+uJii3u12td0eOy5VKhVjwPL5vB4eHnR1dWUHodlsqlAomIPHAXK4XFYK6QX/xowJF4Bg/8bjsRaLhT0j7ODhcLCZMQQ2/EGCwMGjZSxgA2u73+9tH/b7vd1/r9fT06dP1Wg0jLGOxWLWPQn2hI4+2WzWApvNZqPLy0tJMnDn+6fTqWq12klnnUAgYMzPdrtVoVAw0Mpms9ZJjO5IboelZDJpem709q6ECQeCY6GTE46Vc75er3VxcaHlcmnP8vnnn6tarSqfz6vb7do8Eeoa4vG42TcF+DCVnU7H6ij8/uMg1NevX9s6EgTB7I/HY6vZIFMAg9tut1WpVAx0XTYcFg92cLPZmG0R/G63j/N2YL86nY52u51pzWOxmM7Pz9Xv9zWdTrVYLFSpVE5qb77lX5XL5ewcA9w+n8+kSYCVK/lhf8ggcHbpBEXjAtd/AaQwnIAKQQL3yNrANLu+F5DyrsfLw0gPIz2M9DDSw0gPI8FIl2j49vW9L1yHw+H/+76vY2BozF39J//O31lMV47gtmmE+QCcYFz4GVJ9pG75PDdVu9s9do7x+48zONAWIxdA8w0ThN42HA4bU8Wi4+A4LIFAwLoH9Xq9E30oQwElmdOTZAxDoVDQmzdvlMvl7I37/Pz8xHkAFBTh0kUJKQUsF0wRDB+Ajc4b44Kp22yOcx5gyrLZrILB4xBTHCNOge9B8oJ2XpKlfqVjsAC7gBwBwwb4cPocbg4TIM7cjHa7rUwmY99DChwGAvYkmUxqMBhou93q1atXSiQSur6+NvYFY08mkxY0SDLHt9vtNJlMLJBw95SUPfIObAB2lmdBpy3J2Fy+B0eJFIaf++CDDyzgYC7Mzc2NOY77+3vrNoX8gbUNhUI2f2e7Pc69wZl0u137/dgKzoZhmbBZ3B8p/el0anr3ZDJptRs+31ETPR6PrdUu6xEIPBZ5Uz/CYMRisWiDIzl/wWBQb968UaPR0OXlpclnCJT++Mc/qlQq6YMPPlAqlTKJBP4gnU6bXeVyOQNVniEWi5lTpWaCwI1nns/nuri40G9+8xvd3t7q/v7edNfSkRGG3ez3+yZHAIQ4jzCXkUhE2WxWw+FQm83G2jBLx5a2iUTCzgOgQ
3ALa0wROrIHfCBF625g4zYY4HziCwEBwBkgpsYkEDjWNCDX2Gw2J13I8JucScAUqRuzYmD/OPfe9Xh5GOlhpIeRHkZ6GOlhJBiJL/iu6yfpUsgBJEXupt8ADw6wJEufw7jh3ElPFgoF7ffHrjowDHQkYiGRXPBmi3FwQGBvVquVFZ3iAGCEXGeHM2aBMUS/32/3S2tQChkXi4UVzqEX5+DTKSWbzdrBPDs7k8/nU6VSMQDbbDZ68uSJrQUD8Pb7vX7xi1+YJMJts4lR4tQ48IVCwcBpuz0OEsSQ+QwcJFpWvo5uulwuWwo7n8/bfAbmNjCPAfaR1qocLN7ukYiMRiNlMhlL/TPkEDbk/PxcnU7HGCbkDTBjOGoO1np97Mjz8ccfm03l83ml02lLZ9PNirTzaDSyQwdbzKEklY6T6Pf7KhaLFtCgSca5LxYL69hEMTesWigUsqJngprVaqXxeKyPPvpINzc3KpVKxrIEg0Fj35AINZtNZTIZZbNZDQYDpdNpmzUCq3t+fi6/32/DGmESAdDFYqE3b94Y+4fzJzDJZDI29HO9XtsZ5PzSSQpQx9EATgQQ19fXNsgSpi8Wi6nZbKpSqcjn8+nXv/61ut2usaiSLDgolUpWXwETl0gkrH6BtWc/AfvJZGLgHI/HbS8JQHw+38l8nFevXqlWq5l0BNYcBpOfoYX0ZDIxFn86nZ7oypfLpbrdrhaLhWq1mvz+YzF3KpU6ke1st1tjDwlM8As46+VyqXa7bfUI0nFeD8wdrCAyL84V7B7Mr3u28QlkEFztOkGidAQvshHSo3yCP9gvZ4jP5fm8690uDyM9jPQw0sNIDyN//hj5fdePfuHy+R7nJPCg3CCsFxtGyhbjR4PK4uIsSIXudjtbADYch4Ox86aK1tNNraLLRjOPg4Kl4/DBKsEEwoZgeBzCeDxuaW6fz6d8Pq9kMmndU3D8sDTb7fakyBFjq1ar+uqrr6zoFiar2WzavAhJVqjI4RiPx/Zs6/Wxk1IikTC9PU4ymUxqPB6r2+2qVqvpcDiYjGE6ndrndrtdjcdja0cajUZtICT6+UKhYKntdrutYDBozmw8HpsufbFYaLvdmq59vV7bYeXzf//73+ujjz4yBhGGgo5K19fXBugYNcwtOmq3hSjpZQpbcYIUKM/nc1t7nBE1DOv1WsPhUO12W/f39/rbv/1ba6VM0LJcHudYuAwuTh92ms9D3xyNRq1FLXaJw8xms8aC5XI5A02KUbPZrKbTqb7++mtJ0l/91V9ZATlsynw+PwGReDxuzCNOHgYLVomfzWazKpVK6vV66vV6qtVqZhPpdNqYcTrwcB6oA+FsxONxJZNJFYtFBQIBXVxcqNVq2YT4xWKhRqOhfr9vrGSr1bKADE15KpUyp/7ixQv1ej07NzB9sM+5XM58DQEfzDhBD3M9YCKpq3j79q1ms5kKhYKeP3+u9XqtYrGobrd70iK3Uqmc+AQccaFQ0Ha7tc5UzWZT+/1eT58+NSkSjOFms9HV1ZV6vZ6x8vzhHOKkg8HHmUN+v9/YaaQbBOjYFbY3nU5NMoI/mM/nFswmEglNJhOTvDCElDVBngJo4x/dGgdJdr5gvmHwvOvdLg8jPYz0MNLDSA8jPYz8STJcMFpsAOwdbB0HjwXlTRunjCZdkqX2+V4K+3K5nB0WDFaSAQRv+qFQyN6kSd3DrCUSCUs5InWAHSKFGgwGrVVotVrVq1evDKjYWFiO+fxxxsBkMjHdNwwULBUGtFqtdHNzo3w+b1pXWJVQKKS7uzuVy2VtNhsr2EXyIckKByXZoZvNZuaIhsOhtQVOJpN6/vy5er2eMXZ0GkLL2+/3NZlMrAvRer22AwAzBbOCpGAymajRaJj+n9Q6LUgLhYI5umg0qkKhoOFwqFwup1/96leSZAzXcrk0bfR2e+yYk8/nbe9Z8+l0qnw+b2wp9+T3+21ex2w2M505RdcEJjC8rB8BDNKSDz74wGQFpLHb7bZms5ntAQ4DqUs8Hrc2si4LPRgMDISm06mq1apqtZrtN4EDYEydgCSTM1SrVb1580b/8i//or/7u7/Tr371K71+/dpS5wBhqVSS3+9XJpNRu922Qtt+v69cLnfijOLxuEKhkG5ubkw6Ua1WLTgLBAKq1WpaLBbGCFEgjt3s93s7d6FQSA8PD7q7u1MgEDAgjEQiKpfLev78uQKBgLUiJoDZ7/e2brlcTvV63RyZqx8nGKQddblc1r/+678ql8vp4uJCX3/9tQWxAB8MGfcN81gsFq0AGAZxNpvZmtO1C8kTLY2xMQAPoCEzgIQlk8no/v7ezs1kMrEAlK+7GY7VamXBKkHAZDKxomjqHWBN8T98P2cYxrjf70t6rDnAxxJ8b7dbGxaLLMOVoLm1NtTbuIG+y0Z7ksL/3eVhpIeRHkZ6GOlh5M8fI7/v+kleuNAU4ygOh8fiahwsmkuGzfHWSqEZBiod2QRYO7TOLCCAT5EeqULYuWDwODE+EomY/hOtNguOHCEej1tak0nnsFCkNnmDh2Hka6S7D4eDbm9vzYnxvX6/X6VSydLsMDfr9fqkxeZyuVS1WtVgMFAulzMtKBpd2E7Sn61WS4vFwli6drut9frYUpbBjZlMxoYaApCkQyninc1mms1mxg6iiWb+AZ2q9vu9nj17psPhYFrXcDhsBbYMw8Po0PD2+30ryhyNRlosFjo7O5Pf7zf2AEeP/KTX6xlYx2IxZTIZ9Xo9Y4xIqS+XS6VSKQNxn89nTCjsGb/XrXuAAeRQ7vd7+zotQQFuSfrwww81mUx0f39vTCx6XhhqagH2+70VtJZKJU0mE0v95/N5PX/+XP1+39L2Z2dnqlQq6vf7BlBo/yuVip49e6bhcKh/+qd/0m9+8xubTUNdABIfl8GcTCbG8MG6Ii/ZbrcWbKRSKaVSKTWbTdVqNesyBNAS5OVyOW23j/M2cGxIZXK5nJ48eaLb21ubJ7PZbPTNN9/YeeVevvzyS3344YcaDAbGiBHErFYrA1y3/iGRSGg8Huvs7EydTkfD4VB3d3f667/+65MA9HA4mKyAzkcUzWezWY1GI5M3AQTMQCkUCvb7kMrA9OIrYOy3263Ozs6s2BeAjsViqtfrJk9AmoPfIpAGWPhZmF9qZJibxLnn2QiICZI2m43NN6LlsCsPIjigIBypBIGC67Nh9vDX/Cw2ze+nUJxmCd71bpeHkR5GehjpYaSHkT9/jPy+Gq7AZ5999kPw4s9e//iP//gZzA83xds4jCjG6d4kxWccTBd0MAQ2D+dHAafbTcllC4vFojneQqGgSCSiTqej1WqlTCaj/X6vXC5ngBSJRCylyAYCMsgx3ALjYDBoBxHWEceOJATGgM9iM6PRqCqViskr6FqFjleSsXwwCNlsVrVazaQm3BPruFgsNBwOFY/HVS6XjQ1zGclKpXICqty3q5Wl5SoSA54Dw2m1WvZvnU5H5XLZnMVm89jN5nA4GIu1Xq9PWs4iOymVSsZWxWIxSysDwLvdzuQnLjMDaxSLxSwt7/P59PDwoPF4rOVyqV6vp3Q6
bX9ndgOH3z3YFG2n02l9+OGHkqTnz59rPB4bW8HQvVAoZBp4wI7DNZlMTgo5s9msnjx5crI+pVLJWMhKpaJOp6MvvvjCgldsb7vdWgtbnAuMeDAYNHYJfTvMIfpz5prADFFEij4adq1YLFrggvZ+OByq1+up2+1avcNqdWwx7Rbcco63262xu+v1Wm/evDHm9e3btyoWiwZ4biqeICGXy9n5eP36tWUAOGfpdNpm0FQqFd3d3SmVSqlarVq3Ith3Aj6CGQYvUnOx3+91eXmpTqdj8hN3KCTZgtXqOLj08vLS9gYnyxlLJpNKJpMW0ODckUPxPUi50OwPBgNj2gkWkXvMZjPzXwynBdTdi6CRPaVegJoY7BsbhTFkPgvnm/0AuAFYzjB7QCDh+o9AIKBOp9P47LPP/t+PAo736PIw0sNIDyM9jPQw8v3AyMlkot/+9rf/97uw4CeZw+XqxXGeFPi6BaI8KKDABpK6I12YSCSs0xAHJ5PJGCjBDrKAMFnSsSC40+moVCrZXAPSvP8VLJzcO4cK5qBYLBrbwR9JJtXAISMXQIfMfdAVBZYKo/f7/WZcu91OZ2dnko4FgOfn5/q3f/s3M9DpdGqskCRrRQoziIM7HI5TynEcsICkOrl8Pp/S6bQZR7/ftwNIAfXd3Z0Nq2s0GsYi+Hw+K7pMpVK6urpSPB631CwA6M7dwMExi4GCwsPhYO1aO52OZrOZyUlg+jqdjgKBgDkEimUpnGS+DM+SyWRMZgED1el0tN/vdXZ2pnw+b9KG7XarVqtljkGSer2e7u/v1Wq11Gq19Omnn1rKGxuBIWQ44WKxME0xLEk2m7V76fV61haWgAnHf3Nzo08++cQKrAk+mP+CE0faU6/XlUwmNRwOTR8Ne8MZYB8Aamw2HA5rNBoZq5zP560DGFIAtPcugNMxzN1LbMHV6iMbWq1Wenh4kCR98skn+vTTT81+M5mM/vjHP1pL58PhoC+++EKRSERXV1cql8sWPLJH/J3idPTZnJdvvvlG+Xze7osieWpPttutsbAEkO12W7vdTqPRyLpFIX8A1GDPABqACHCgyB8WzGV7CdKCwaD5LP4NAKaAlxkvBB8wrjDtsJCbzUbj8dgC3mg0antGBoFABKCnID4Wi6nT6Vjba2o++B34LPwT0i2YcAJAfob99uq43v3yMNLDSA8jPYz0MPL9wMjvkxX++dzXO1yuk41Go3ajPp/PUrosKg9MCpafQfIgyXSXpCsBHd5oXQd/OByMDWNDWBBaS6J1XSwWNq0dNmI0GqnX6xnLR4Efn5XP5607z/n5uTlXGEBSkhwyQGk2m510ZKG4sdVqSZKq1apJIdbrtRVA0l5zv98bg8nb9WKxMElENBpVsVg0p46enSJR3vDRVaMZ3+12xhwCAIlEQo1GQ41GQ9vtVi9fvrRDzbBM9NSLxULT6VS3t7c2g4O98Pl81j2oVCopEAhoPB6bPaCpv729tVaibiABCLbbbWP+crmcDakMBAK6u7szm0DmkUgkFI/HrbNVKBRSoVCQ3+832Ua5XLYhhTCJMB/r9Vq5XE61Wk2hUEhff/21AVqtVtMnn3yiarVqmnNsPhqNqlwuG2gSuLBn2+3WCtKbzaY5xM8//1yRSMTaGqdSKZMDYPdomZvNpv0utxCUwA05jvQ4MHA0GplEJR6PKxwOG6sEEwgw5XI5LRYLPX/+XMVi0SbOo9VvNpvmDGHV+HsoFDLd85MnT9TpdPTP//zPuru706tXr4w9urq6UiRynBFDDUksFjN9/X6/14cffqhf//rXtg+r1crmh7RaLbPj/f6xVSuBKDUcOD2CQBxou93Wzc2N7dFmc5zfgpwDH+H3+63rEeCDNMhlSvmMxWJhbYoBZPwbTp2iZ/TuMK0ACLIQzrDbUQn2HJZ4NBoZcBwOx2J5gBQwoBibmUPdbvekvgCm3ZUMwdhRZE8htcv081ze9b+7PIz0MNLDSA8jPYx8vzHyJ6nhQv4As8YbXjAYNEeDNhr9Nzrp7XZrDGA4HLbPIh0oySQW6KxJA+OoYTK4GD7osliAnVtkCrPS6/UUCASsYJGNYT5BOp22t2/0z6Q8Kf4D6OjK4/cfO8zM53MVi0V74wbYSFninIbDoa0L07TRHKdSqROWZjAYaL1e2/BAuiqtVis9ffrU2AW6zJDuXq2OQydhqdHrIjlxWTQ37UrnFtaNIk8G7pGaRu5wd3dnXYMCgYDy+bwFEDAFDPRrNBpWMHk4HFStVs3pkJbfbrfmjPP5vEajkdLptHq9nrbbY9cn2gXTXpe1XCwW+vLLL21eAmwRwQCsFAERNkfxMM9Hi+ZCoXAi4YB9BDhISdOud7c7DvOkW1e327Vhhblczpw9rPNgMLC2z6TYb25urKsW8iOXseYeOQcwfOz7fn8c3Hl3d6d4PG61Eq9fvzamrlQqGcs7Go00GAysVTDBIBIAVzMNYxsKhXR1daVGo2HsMg50PB5rt9up2WwqkUjo6upKq9XKpCmcNYDA5/Pp7OzM2FIK/rFNOk6Fw2G1222Vy2U7T51Ox4AQNu7zzz/XbrfTRx99pOfPn6vb7dp+uM7UDUYnk4nq9bpCoZDValQqFYXDYUUiEQv0aHncaDTMb3U6HVsjSXYG2S+3EJzAeTqdqt/vmy31+32TbTEniYDVPcvsfzgctsCFsx0MBjWfz60bE8HMZDI5ARgYUgI0WEiYZXwGMjHvevfLw0gPIz2M9DDSw8ifP0bSoOO7rp+saQYggmGThoPB4i0dJ4yOGRaBnwc4EomEOV0OOs4abTrpdHcRksmker2eNpuNvWG7bFY8HjdGCR0yB4auShhCoVDQbDaz1CgOnZ+BtcNpouuMRqPKZDKaz+dWuIdzqVarZrRucSBMFGu0Wq20Wh0H51G4vN/vrQvPYrHQ27dvlU6nlcvlzLCy2axt/HA4VKPRUCQSUb1et58nPYxumXkaPM/FxYV8Pp8B3HQ6NcNijXkmahPYG54DJopnIZggHQ1zRecqilFJqSM3SKVSJtcIh49zSh4eHozhgBFJpVL605/+pFqtZvNGcCqhUEiZTMakCPP53BzVarXSxcWFMZFPnjxRtVrV27dvFQweu3FFIhFjCdH1wiiHw2GTPiyXSxUKBW02G7VaLZVKJfX7fXvubrercDisZ8+eqdPpWNDAbI3r62tjXH0+n0qlkmq1mvr9vhqNhs7PzyU9ynIIIGj7DEgDikiXYOzOzs5sbs9gMFA0GlUikTDdOzNpkLUsl0tdX1/bvm63Ww0GAytW73a7xtYvFguVy2X98pe/1O9//3vl83nNZjMNBgOTPdGtDJnIYDDQxcWFEomEFRmHw2Hr8AXwr9dru29J5hvq9bpKpZJpsMfjsZLJpElGaHl8OBwLf2E0kRjQWhqbI6CCOUMegSQCSQIMLT5kNBqdFAIDyovFwoZ7EuBgOwTD4/HYiv1d2RlBNi15eXa+j9oYfpZGQh9IAAAgAElEQVRgDB/E82w2G5PCRCIR5XI56zBFYOKycwAL98f/8zXv+t9dHkZ6GOlhpIeRHka
+3xj5k0gKMWTXSfF3NgD2zH2rhTXCUaNPxVBhyVhcJBc4dUm2QbASOCvSm+l02tgOtMb8fpdRcwuX2cj5fK7hcGhg0ev1rLC02+1aMajP51O5XFatVlOhULCZBBgyAMEbNGlNn8+ni4uLE/bYHQqIfls6AjbPTKFvNBo1ZuPp06fGOtApqNFoWAEjvw/giMVidigoEsRoU6mUGR8OdLk8ztpwO0oVCgVlMhnbG4qRI5Fja1GGB5L6dpme0Whk6wGgwcSVy2Xlcjlz5tzLbrez7kM4coqUGXI3Ho/NaUiybjyAQTAY1NXVlUKhkIrFoh16isjX62Or43g8bvMeKNx8+/atTY4fDocmtRkMBsagzGYzayPMPUYiEbVaLWPper2eBUGHw8G6ja1WK2NaKHz2+/2WTqdL2O3trRXBU6wJY0dws9vtjE3EYWy3W5VKJVWrVT19+lSlUulEm4wNwPTB8IzHY3U6HasR4ExytpPJpAGoJF1eXqpWq9lZo+NYPp+3TlKSTPZCAAYbyP6+evXK6gTOzs7MYa7Xa3U6HTUaDbNZipz5LNg89PkAVqFQsCL+1Wqlu7s7s3kYL4Jdl9lLJpMajUbq9/sWYGDrMKfBYNCCPwIk/EMwGLQA2mXb8EFuy20Aw+/3W0czSVbjglwER+8y58ggSqWSJFnAACiNx2MlEgmTomEjZE3wjRR6A3ysM+y2d73b5WGkh5EeRnoY6WHkzx8j2bvvun6SFy6MwtWPf5tV+Da4oE9HYwtrwOfxcHweGmtS6aRROegUZ7LYfA1g2e/31vYWJpA3aelx6jT3ig57v38sCAwGgyedeHhTJpWM4+/1enawACfS1n/605/UbrctDUpBMqwGQOLq9+nw8vDwoOFwqM1mY88bDAb18uVL7XbHlr9fffWVdSFCq47DRiZB6pWWrBSHSsd2wzh99LiA4cXFhQ3cRFfLzwHirCldbmAzKMrEWUynU61WKzssdLxx9fPSIzOMVGM4HFp7YBg2utjkcjlj63AsOANqDyjYTiQSJs/o9/vWsWqxWOjFixe6uroyZgknjz4XB8kh456x+91uZ8W5SC1g6mjrezgci2UZkOgWegLK7MtqtbIZK7vdzlgt0thusBUMHjsSoc8mIJrNZqbNp3tUIBBQoVAwB4qdZ7NZVatVmyfT7/dP5qlw0S4Z9jsWi1kgEQo9zr4ARLEjzsl4PFa73VaxWDTAxOaDwaDZE0wutkjBKoDA3iKvgeGnZiWfz9vPwLoCCJFIxCQFoVDIZu+Ew2Elk0mVSiWTl+CwN5uNOXtsCnkRQZsbOAPWrDtgzzPBBJK1wIZDoZBJuWD4OQfr9dokZzBuAAFNCmgwwGdhX0gyCEj442ZR8C08C4EG/tK73u3yMNLDSA8jPYz0MPLnj5H46O+6fvQLF04NJ8WFE+UNGGeH9pnv4TNYDHSibuqQQlYOLRIGDIm0KBtC4RwbV6lUrIMT7Bgp9kqlonQ6LUkn0g6cL6wU947+lA5D2WzW2pTydr/dbpXL5U5as6LNpjB4OByaQ1qtVna4eDacA0YHE8gQQwor0VA3m01jysbjsQaDgTk3ZA4AMgWjOHpXH4z2HObQ1dU+PDzI5ztqtNGsY9SwQAA5WmmcN/uNfAImEecHk4DB45AozIRZJyWOrr7VaqlSqVjXqGAwaAP6cDDcPw620+mYk8dRw4xIR83u7e2tyuWyrq+vVSwWrUOS29UL1hknC3sGSwwz7Pf7bbjlYrFQvV5XoVAwpw+TAxi49kgg4AJaMpk0CYnLuiHlIT0P00qKPxwO2/fhEHe7Y0FtOp02CQbn2OfzWbtgHCOyGSQd2+1W7XbbnNpqtVKpVDJGrNPpWP0GzwArhcwHtgz9NAwhbXG3262++uorq0fAZ0iywGC321ndBsEj60VtCd+HzQHMkUhEw+HQ2LhYLGbrB7it12sLHGCEeV5XpuLWSOCzkIEQpBKUSPr/27vy6Lir83pn31eNNBotlmzLNsY4psQrhgSzGBuMDYQlTWh6TmtImxQCDUsoISVpSxsOZGvJH06zkBKawGFtWVowkBwIa8DYBmyDLVn7MpoZzaLR7P1DvZ/fCFm2QA7YfvccHYw0y+/3lu++3333fZ+cy2HfUFkkAdAaxjnMNic5kiDVv1cqFcTjcVFG0+l0lY2N446LYMZX9bNLpZJ8PgmFcVFjetAcqTlSc6TmSM2RxwdHst0nw4ykhVfVJNUWofrWeVEMjnx65JO6agtgsGFnTHbYmCTF96mV1jk48/nxCtrNzc3o7++XA45UnvhUzBoDJDn+l5PbYrHIFiT9nlTwOEl4PQwyDocDFotFbA5UKT0eD8bGxuthNDU1SRDn5GQAYGBQi7mR5KhccNuVh15dLhfq6+sRjUaFSDnwAYgKQ6IGIBOd3+/xeGAymeTgc7lclsOq2WwWs2fPFrUpk8kIcXPLmJ7o2tpaUTC59cogw+DHH9XioZIblZG+vj65HuCAauT3+8V/z8nMBQy/gwRGdYwKELMDsZYLbSQcZ6Ojo9i7dy+sVqu0CRcaVKl4IJpjiQGXWXH4O47PZDKJmpoapNNphEIh8YPzWmmboX0kmUxKdjEeAOecYABS5wAXDDwYSuUomUwKOXPBzIxXtLLwfWrbqxYim80mJErQqqKqj7xf2lqY6lb1wVPl4pxRPd1UbklY9HUnk0lZLOXzebH0FItFsQtwsUerDOMSxxTvmWc0eHDZZrOJ2uXz+aoWtlSz2ea8ZoKLN6/Xi6GhIekLqthGo1HiB2MF7V4keYvFIjVvSDRsEx4iJ6mWy9WJEfgZagzlIkklC8Y1jmOOC9UqpsY+vo/zh8roVOqdxuTQHKk5UnOk5kjNkZojZ+SBSx0UfMJTiYVPfnwNG5MDi9t1zPTCBuKF80a4XTmRXPh55fL4QUdWkI9Go/B4PDCbzXIQuFgsyoSNx+Ny6BWAKGFqJigGH7vdDqfTKUGXk5qHIfmUzglLxS0QCMjgotJDvy79srRDAON2ARbD42dks1mZOGwDtgvVlnw+j87OTsyZMwejo6OSUYf2C6NxvJ4IB4bRaJQAwyDGrWO+dnBwUA7kejwe8f9TXaWixu+nn5nV3KmisH8BiNJD5VHd5iU4Eai8xeNxyYI1OjqKdDqNVCqFsbExKWZZX1+PbDYrtVQ4Fvn9fD/7n9Ya1lgpFotSTI/XYDQaxS9OW0GhUBAy48FWBkCmP2bf8CAsMyFRdaQCzXFuNI7X0GDApCKWyWSkUCX7xeVyIRqNwmQaP0zP7XYGGy44mF44kUjA7/dXqUDcpjcajVWLAWaZoqeeqhAtN1wQ0YbD8dzU1CTWIo4JWg+YBWl0dFS+n/OLQZZKtslkEi+2mnXIZrNh9uzZ0l51dXVVNgkSIFVjErrFYhGfNgmN4x1AlYLF+MEUuCyqycP7JGGqf2raYfY5Dxuryi3foy44Sc7cRSiXy2LtYLxgLR4uLEmqnG+Mm/w7YyH7sVwe9+wz7nChw8U7xy4AmYMc+xMX/1SpuUDRmB40R2qO1BypOVJz5PHBkVNhRs5wMc
CxQ/gDQII+L4aThzfCp0nggEecQYgqDH/o72Qn8t88yDc0NIShoSHYbDaxMtDrCUBSb3Jrkof76KXn99JXTwLJZrOiEJCcuA1O24Xf75d6GLQC8Nqo+NTV1aGmpkYCC7dZY7GYDCr64mmbIHFwW1TdwnW73YhEItLxw8PDsFqtMsh4yJHKAL/X7XZLNXNu+1LlowpjNh+ooVGpjBe/pJ2Cbc8+p9pK7yy3Z2kboKWC/cfBy/+nXYCHE7ltS2WF9TVIVkwPynFH4nW5XDJ56WNXgywXLBwPAMSqQrLmljUVHvYnyd5iGU9PykUU1Q+z2Sy2A76OY9rr9aJQKCAUCskii2OCCpvFYpGtfqa9pY+flgAGFY4T+oWpPKt2BraV6kXmuEylUqKuc56RlBgI6Tnn4WSqs5wHnDNMx8zFeDabRTQahdfrRblcRkdHhwRk+vo5/thuauDl/ZCwAUjmJh76pw2J5MA+5nkJBk4St9FoRCgUEkvPxIUZxxDnDccALSW8dxaqJBHwvqlaqzGQcY9jmGo+38v+5X0DkMUKyYbWBRIF56RqN+NCjud2SLIkZra7ehiZfct2m7j7QEWZcZoLEPaHxvShOVJzpOZIzZGaI49vjpyxOlwMArxJqia0CqhPg3xSZEcxOPLJkuRAMuE2HRsUgDxxEybTeAHB4eFh7Nu3DyeddJJkXxodHRUPOycvt6AZzOhJ5RMwX6f+qMEtlUphYGBABj2vjVv+nMxsE/pVY7GYBN62tjaUy2XxtLPWBlUOvp9bwYVCQQZBLjeeDreurk6CADMNUd1xu914//33ZcJyoHJAWq1WBINBGcRUzVjLQt1appLC4GI2m0UloCLILWfVpsDJa7fbpYAgLR/8Tto3OFZ471T9mBo4l8shEolIgOAixWgcP9PAA99UJhi0OZbolWewoI2Er1eJm/3PQFIqleQgptVqFYsEVR2LxSLXQJsN7RjsR1oiVFWHpMlsVEajUQ6M0/qSzWYlGHIBxerqVHRYq4T3wPul9YNqWzabhd/vFzU1FAphaGhIgj/7jWOM84N9RDWOZMQD5bQecfFA5Vftx4kWKvqwae+gxYfxpFgsSgpnjqOxsTEJerTYMFMbFTUAYnOKRqMwm82iRo+OjoqVitnQGJcm7uCwjaiI0tvPQ+JsXy42qB6TzEneXMDwvYx1HAeqLYFjSc2QxnshgfH9KqGp9gueqeE84XtIDCRvxk++l3GJ4BzlPfJaNaYPzZGaIzVHao7UHHnsc+QRtRQCB54i+bTHziYZ8MLU1/IG1MNo+XxeOlB9uuTnMKDRAsAnbiooPp8PbrcbPT09UhF8eHhYFEGqQrzGYrEo9QVIJOVyWQ6QjoyMSCAfGxuT+gU9PT0YGBiQCUoLB5+QOWm4hTs2NibZfVgJfXh4WLZ+BwcHEYvFUFtbK9mIqMZRqWQwIzGwgzOZDGpqalAul6VAZD6fR2NjIxKJhAR/PuXz4CDtAlR+aPugiqQGYAYxqnwsOMin/lKpVJVtiWRMWwaVM4/HI4c2ubiw2WxybwDE2sC+Yn0Yptal4qPaOqguZDIZ+Hw+uV+qOyRQ2h9IFPRn8z7ZH1x4AJAzBWxXjhd6ta1Wq7QR1Uv6vJ1Op5BBU1MThoaGUCgUUFtbW0VOHDvqZOd45RhzOBziO2bWqGQyKW1GKwj7c3h4WNLvUj2jqsqUsqVSCYFAAIVCAQ6HQwqI8v65KGIfcwHCfqZlhgsEtgdtBlQFGYA457kAYJpnqpG0KFitVsRiMVHD0uk03G43+vv74fV6JdaQlJgdioTEvuC5hGQyKX3OGMSFLschxzjHF+eL+qDBRRVfA6AqExwJmfdNSxZ/R0KmxYaLW/6X6jNrllBZY+zkYo39wFiq2ic4t+x2uyjcjHVURhk7VAsGCZvgIomvVxdbGtOH5kjNkZojNUdqjjy+OXLGHrjUrTQGInas6qPkj3rIjAcgaVHgkyqDO1/LrUO73S4dTtWLqgoP/6pZfTKZjKSKHRgYkG1Vo9EIr9cLp9NZNbjYkNwy5SCrVMYrj+/duxfBYFA8u1T2eDg3l8uJ9YOEBUDUG241t7e3Y+HChejv769K1Us1x+l0SrvwKZ1qAwMHCZRtkUwmcdJJJ6FSqeCdd96p8qPTb6/aEli4jkoUr5fqQyKRkInPtimXy+jv75cBSv8tP4eqLD/L7XYjEAhIG9LLbLFYUFNTI9vvqtLJgMj/54RLp9Pwer3S9nwP61ykUikEg0FRQvL5vNQXoTrKVKEAqrIaUQVkcGW/AxD7C9uMXmgqOvn8ePrmYDAIn88nKm19fb0UQgyHw0gmk9K23ELngoTfw3GXz49nDSKZpdNpSfdLYuOWO9tGVWCpQjKAcUzTB83XciFF3zwXFiQw+u/ZdpxrVOUZWJmpif3F++Dc4AJDVeJZw4cKbalUEo8+vemMI6oFJxQKIZPJCEFy/LItR0ZGhJjsdju6urokHS8XOyTUTCaD+vp6BINBpFIpGVf8OxVKALI4sVqtCIfDiEajVWMwFouJUkd1FjiQZY5qHscXFUyj0SjqNYM64wX7jkEdGPfvc7zwvAgXiEw6kM1mJdZx0cLrYFuqBE+S4bhn2zEG8rUa04fmSM2RmiM1R2qOPPY5cirMyANXsVgUewCDlOp3VImGjcUfelFJPrwREhB9nAyovGEqf7xpqgUul0uK37ndbuTzeSQSCaTTaRkUg4ODsm1J0qI6oW4Bc7vTYrGgqakJAwMD6OjokLogDOiqt5z/ZSDggODg5d/7+/vR1NSEwcFB9PX1SQ0GBiaqByRbq9WKkZERqWhvsYwXRozH4wiHw/B4PHA4HBgZGcH8+fMxMDAg95XNZiW48MCkulVLMmQfcECRIKgslctlqYw+NDQk10EVgf3GvjMajQgGg1XKpap0MOVvNBodH4z/H2yotNHCwm1tq3W8UOXIyAjmzp0r3n8qIFRP1e1qtiMAUXlJcl6vF4FAQA6kqgoVFSC1GCXHHlUl2iFYqJDeciqwLO5I1YbKJACprj7RjkMl224fL9jJayNpVSoVJBIJjI6OF+JkAVGqfsViUc5ZqJYGZhkzGAxy6JUH1+12uyhVzEDENqMXX1U6AQj5MViaTCYJxH6/Hw6HAw6HA4lEQiwODGYMUHw9gyzPShiNRtTW1spCIJfLIRaLoa6uDsPDw1WqP5VPBmKOA8YjkgGLTHK+c3GrLnJJhLxOxjPGHbZfLBZDuTx+4DYYDGJkZASVSkWIl2o/s0Wp6hu/jyTP9uPc4z1xLAKQsUi1m2OPC0Qq7mNjYzK32ccmk6mqCCXbV42jHN/8N+c6xyVJvVQ6kHpaY3rQHKk5UnOk5kjNkcc+R06Fj/zAxYDMLDOqr5sTkAGKryXBsPP4BMsOo4JHhYedQeWLn8vgSJXJ7XZLETMOKBbR4+tol2AnsiOpQKiKGz3Gra2tiMfj6OrqkkOJVJw4Wek7pnedk4aDYmxsTDIH5XI5UaGSySTK5TICgUDVgUtuH6uKCQMqlUwSGu+3u7tblKW+vr6qOiYjIyMwGAyiUpEsV
cXDYrGIAkPVioSUz+flcLTJZEIoFJI+5kDnd1HpikQiKJfL6Ovrg8PhkMFOTzyDHxUzbjdzsNMfGwqFkE6nMTw8XBWkAMDv90sgJZGpB2Lpp2ZA4za0utVM8uI2N4sR8kwAJygXBVRX6W8nYZHU2FbsS5fLhXQ6LeNB9aVTsbLZbGIZYlG+fD4v2cLy+bzUQtm/fz/q6urkTAJtPYXCgQPjPITMxQv7iH25b98+tLW1SSYoplWlDYSLQh4w5SKGKhsLHcbjcfj9flFkGeyCwSAGBgZgt9uRzWaFVKi8ErxPtivtHDwTwL95PB7x+vM8Rbl8wEPPRQpJm+3MxUptbS1SqZT0u6peMaU0bSQcewyiXODwe+mTLxQKsvCw2WyIxWJVChljTKl0IEUxxykA6XsSN+ed+jveA8+G8DM4fqhCT4ydvGamDeb4dblcVRYxLvzUhdREOwljMeO7xvSgOVJzpOZIzZGaI48PjmQsngwzkhaeh9b4w210bpsTaoACICqVeviyUhk/+Kduw7LjeHN8EgUgT6psHG7te71e7N69W7bh1cbgJKctg5/HDuTnWSzjheVGRkbQ1dUlFgmn04l4PC73yEFHlSCXG6/bYDKZpC4EK3MzxW0ul0NdXR0SiQQcDocU6mPdAx7q43YzPb0Oh0OUHQ4WHk5MpVIYGRlBNpvFvn375Ane5XIhEonA5XIhEAhgaGioattWJW4GUKoqPp9P+o0pYekZ52Sh/YJ+Y04Aq3U8/afD4ZDsNYVCQdQNtlttba3U26AaQUWWW8EMyPQ/q2phPp9HIBCQgM1JQgWCKiuJQR2XPCzLg91UlHhGgSlI2S70nTscDjk4Tb+yxWKpqu/CbXmqTaxXwf7k53GxwXmQy+XgcDjQ1NQk15PJZCRFcTwex9y5c9Hc3CwZlnjIFTjgLTYYDOKBp4WAB2Gz2Sw6OjpkcRGNRkUtY4Yml8slCimVJS5s2BeJREKU6mg0ilAoJIeVw+EwhoeHJ21DjjuPxyPKotvtlixiwWBQMhsVCuOHVLlQooWCgY2qEhc0FosFiUQCJpMJ4XAYAKReDm0UVMWpKpPoSRBUKZla2Ww+UGyTRRsZ6xKJBHw+nxAtY1u5XK5KwayeMeH4V1U5jgMecFbHMZV8kjaJkDse7BseEuc4DwaDsmDgeGT2OI5Tqsi0Q6kWCTXGcf5oTA+aIzVHao7UHKk58vjgyKkwI+xJryUDPrd8qY6wcziZqaKpN0W1QJ0c9K2rT9v8XNVTWSwWxRpB76rRaEQ8HkcoFJKBxcFhMpkQCASqbBfqwVcGBio77e3tclBUXXhQbbHZbKLKUZ3gwJk7dy7MZrNMUCohc+bMQTAYxPvvvy+KD7epef9sH3qf6S1m0KFHOpvNIhgM4r333pN0uSMjI/D5fFXFKyuV8UJu3D7lVjC3W7nlqwZ7Ti6v1wur1SopbdVFAAe/GnSoMgCQjDUkcL4fOJDlheTBgEi1xGg0CuFSAeF3A+OFKr1eL2KxmFyz1+uVMRmNRuU+6ecmMTMwUOGj0sZx4vF4JJhxzPF+BgcHEY1GEYlEJFh7PB75/3g8LmONthmv1yuBTrXkcJLyIDRJfnh4GDabTVRctnVLSwvK5bKMaQahiQRLZcpisWBoaAgOh0PqjagHsXO5HGbNmiVnJBiYWH9kZGQEACQYuVwumRvBYFAUdWbYcrvd6O7uxvz580XNDYfDsphgX/IgL8coM4jRagBA7pltQDJkcKbyy0WOxWJBKpWCz+eTbFiFwvhB2mg0CrvdLsU1eYiduwIMpqptTg3cLIIIHEjPzdfHYjG4XC4Z67x+Lp5Iopy3/DcAWRzRHkMPv8lkksUOlVTVJ656/ammU0HmvDKZTNKmVKep+vNcCQkaOLBwp8rIBTwXKPxujelBc6TmSM2RmiM1Rx77HDkVZuSBi6oLVRsAEmzZYCSZSqUiBzK5NcctegYbWijoKecTqapkqd89NnagOCC/kwoBG4aKF9Uh/nBrnt/DhmZNgVQqhUAggLGxMelAbq8DqLLZsEM8Hk9V57pcLrS0tKCzsxPhcBiNjY3o6uoS0nG5XOjt7RVy4wSkssCCd1T8WCulUqmgublZbBChUEjukX5jHtyMRqPI5XKoqamRLXiDYfxwq6ooMHByArndbqTTaSQSCVgsFkkdSr89r5nfyXZQ1TISGScCFQdui7PwIf8fgKharK3BYKJm2qE9g4GEKiInt8FgEKWWROJwOKpquHBsceLwWump56FZYFwlYrHEbDaL5uZmsYHwx2w2S9Yhet9ZM4SLDtoXeOCUZAocSJEMQO6dr+E44Os47rlYo9LMucLDsqrKSkLgnKTVaHh4GIsXL8bQ0BDy+TwaGhqwf/9+eDwe8dqTbEwmk/jY1WCsbtc7nU7s3bsXfr9fiJUHcumrJrmwbalgMUuXqrIHg0F0dnbKooC2HS5S1RogJKp8Pi++dJ4ZGBkZkYUUME6AVONHRkZkTNGexP4gkXEsUqkmaVCx5/UyHpIMuQjieQ7OMy5ueIAagKhqXGSqmbNo62FM4/2zj/kZqnrNPuN1qEoe+4/fy8+j3UT9HBKLxvShOVJzpOZIzZGaI499jpwKM2Ip5CTiIGVgZ2Cj8sHgzq10HjDkDwmGXndOWk5oKmpsLHphx8bG5AmYaV4ZSPlUTu8zlSwGKqpWlcp46kw+7fKzvV4vcrmcpErN5XIYHBysmsTAuE+a27hOpxM1NTXIZDJCPpFIBD09Pairq0MoFMKePXtksNLjykFbX1+PfD6PoaEhCU5WqxWtra3iEadaMzAwgLlz56JYLMpWPgeM3W6Hz+dDf38/AKChoUFUIR6kpDJDe0AoFJKJyNSt7B+DwYCBgQHxAZMQOGm4IODBX4PBAL/fL/5wn88n7yuVStKegUAAPp9PvPr0L3Ngc3FgMpmE1DlpVTWGkx2AHPDl/5PcGPT4k8+PH9jmwVMetmXmGvU8BRcqTMFqNBplrHo8HlFC2F4OhwNer1fOA+zevVvU1kqlAr/fL4TNQ/EMULTPUMHjmQiOOR5KzWQyKJVKsvBJJpOSMplKNG0uo6OjqKurg8PhQE9Pj/iWqcYlk0nU1tYiHA4jnU7D7/fDarWiqakJiURCDjiri7ZsNgufzyd9H4/HEY/H0djYiGg0KsoQlUoAkjGNpEvbQiKRkDMbiUQC4XBYbA30vtMHDkBUqkqlIvOUfUrfdX9/P+rr6yWQU3Gjj9/tdqOvr0+sCbwu9QwDfeRsJ5JnMpmU+GE0GqXeDxdeFotFFko8nMs+5nynx54EAaBqUcm2prqsFqblnOMYJTGoOwEkHXr7VaLgAo/kxu/h79hWjPOq0qxx+NAcqTlSc6TmSM2RxwdHToWP/MBFXzK396gq8Ca45c2nSD4F8uLVwED7AQBRVEgiHEz0XVLlK5VKaGtrQ6lUwv79+2EymeTJ3uv1wmQyyYFJbiUDqPLEs44DfZj0lBaLRakHkc1mhcyoNHDCMnjTc8sn+ng8Do/HU+XPzefz6OjogM/ng9lsxqxZs9DV1QW3242WlhZU
KhXZ2ubTeaVSQSQSgcfjwXvvvQeDwYBgMCh90NXVhfr6eoRCIdn65wQfHBwUny4nLQ8sckDHYjEJCgzkaiCmYqAqTvz8bHa8OGIgEBBfP++f/mWz2YyhoSFRa0hCxeJ4il5mcyoWi6irq5OzAiQeDnbacHhGgAUELRYLenp6UFtbK203Ojoq/nH6ibndzOBjsVgkc+wyjo0AAB9jSURBVNfo6CiCwaAQCdWQZDIp44yLlUwmg9raWjk/wPFKuwknM1VhZuJpamqSFMNjY2NyoJfFCTk/fD6fkGY4HJYgSa85CZSZjlh7hD5ktiUJr6amBhaLBfv370exWERHR4e8zuFwIBKJoKOjQzzwTqcTPp8PLS0tiMfjsFqtaGlpQW9vr2Su6u7ulmxeTBnL1L5qBjLadhhQ1aAKQFQqztNKpSKHVrmdb7PZEI1GUalURImz2WwIhUJC4OpCgQoh26C7uxvNzc1CnOrClwo7FzhMM8174Dhg3KpUKuju7pbYoaaC5vjnAoqLwGQyKYotVUMusjmf+BlcjNJyYTIdKBxaLBZl54ALNCp6HC8kUt5fLpeTuU1FkaSh2oZ4gJ6xUY2VjF20uGlMD5ojNUdqjtQcqTny+ODIqVwgH/mBi1uL6rYnL4hBNp/Py8FBkgsHE7c4qayxE9QBx0GvBnwOCA5uv9+PQCAg6hEVv8HBQQwODsrE7uvrQ6FQwKxZs6QT6FNlx/X29op/NJlMYsGCBahUKujv74fdbpeJYjCMH+RLJpPo7OxEXV0dTjjhBMm2RF8wJxGDBQ+gDg4Owul0ora2FolEAqVSCZ2dnUgmkzLomUVp7ty5SKVSSCQSqFQqcLvdUvytq6sLixYtgt/vR19fn6ipPORJZbKpqQmFQgEjIyNVnmBgvJBcKpVCd3e3nBWg0kC1jkpTuVxGT0+PDEC73S4KJ5/+s9mseIBpS9mzZw+8Xm9VBh9+F3242ex4LRH2PdUy+rvp2SUhlMtlWSyQvAHIYWKqKCQxdbtaPcBeLpeFeJn9h/UjqJ4xaHPruVgsiteYY5e2CS5Oa2pqRAlhQU9aaLjYokLNsURiol2C5xK4cBsbGxPlb3R0FOl0WhY1drsd/f39kgY2Fouhvr5e5mS5XBYV1eFwoLe3V66jUqmI0sNsYCR7t9uNgYEB5PN5uSfWzuF5ANocbDYbBgYGpN/ZX7RysJAk1WsG73K5jHg8DrfbjWg0itHRUdTX18Nut4uKSfsGyVddUNASpWaTop2pt7dX+oexhv1JmwdVPSqAtEzQikKvPhVlZr9i3OPY5rgaGRmRxTCVN2a4YnuTSDkXON6pHBYKBVnc8ppo7SsWi3KGgO9VzwgVi0X4/X4Z/4xBBoNBxgznAz+Tc5FWHCqevEYuQDUOH5ojNUdqjtQcqTny+ODIqWyFBtoPPixaW1srt9xyS1VgMJkOpH0k2ajqnvrkzkZTf0fCoXLDJ0Y+efLG6b0m2ZBo+D18olaDIBUCHhak95dPpnwPO9tkGs+iZLPZpAAdn9y53UzPMWtWcHtcHQwWi0WUjFKpJClEGcyHhoakqBy3eqlYBQIB8dkODQ2JvYLBiRMwEomgt7cXJ5xwAnp6esT3TIKvra2FxWKROg0MPjysTMuF0WgUu4jqTaWlRfWsUhGhV5kWGYJ9pqob9OWzvdlvfB3HARVE/tC7rqp7ZrNZMv5w0nBi8X20ZqgHP9UtadVfry4qstmsXB8nIlVddfHJz2CQouWB38P/cjHE7W5eLxVhqsPq53NyU2XimOU5DdXrTAWKNhXeD/uSyh/POnCcUnVWrQAulwtjY2OSKtnpdMr92+12mVe03aiLOHVhSNVc3ZKnt5vBTC1QyOvl3zlPaaUhyXERSCWY8YZzmfGE5K1aqpLJpFh1OEZJqMx4pfrnqcxz0WQ0GsXnzver38fxQ4WM84aqLa+PcZI7Fmos5Pu5AKeSrcZJWqa4C8JYzvjIOEcCpS9fHTOMNeqcYBwgCfL3VA03b978h0qlsvQjEcdxBM2RmiM1R2qO1Bx5fHDkbbfdho6Ojkm9hR/5gWvp0qWV11577cAHTrixwwUbUkNDQ0Pjkwuj0agfuKYBzZEaGhoaxweWLVuG119/fdIHrhlJmkF1iE+N/D3JgQQz8d+qTQKAPO2rr/ljgtdLJZI4GEFOvJeJv5up61FVzoO9bmIbH63EPFn7qffycY2NjxsT+3iyv6tQx4A6fid+xlSfO7Hdj8SY4udO9tmH6uuDzbnpzsXDua9DLZInfueHba8jEUc+DA413jQOH5ojq9+rOfKjQXPk5NAcefDr0xw58zjYuJjqnj7yA5dqZxgZGcH27dtRqYyncpwzZw78fj8GBwfR3t6OxsZGNDc3V22nx2IxvPvuuwCAtrY2RCKRgzbk4XTQoV4z1d+5pQxAtmz5HhWTvZ/bk9PBdAfMdF7/cQ/GmcSxdC8fFodqg8n+PvF3h/Oayf6mLrJUWxRB64y6tT4Z+H71cw72OvU7ppp/kxEm/zvZtUwMkrwWvnaq6z/YtQIHzuUAkGufavE52edMXIBPXERO/NuhrmliG0z0lk/3XjU+HDRHjkNz5JHDsXQvHxaaI6v/PvHfmiM/eE1HgiOnev2M1OHixb/77ru44oorAIyntVy5ciWuuuoq7Nq1C//8z/+MzZs348tf/rKkjhwZGcF3vvMdPP744zCZTFi4cCG+973vYd68eVUHQicbEGpDqYNzssGqejonXrOKaDSK5557DjabDWvWrJEie5Pd62TXoP5+MhWTUN+jA6XGJxkcu8ViUc5hMEUvz/TEYjEUCgUEAgE5b3GwecNMUkzry4r2mUymaj6w5s/B5gcP99KfzrMCTEVbLpclDbUaTJmClwvaQqGAeDwuB6WdTue02sZgGPd6R6NRyUzGZAH8Tp59YVakgy1o8/m8HELnmRg1rtCP7nA4PvAZE2ObwTCe2YoJFyqViiyU+Xf6zyfrL42ZheZIzZEaxyY0R07dNpojD2BGHriIUqmE4eFhnHLKKVixYgUefPBBVCoVnHDCCUilUnKwkbaKBx98EPfddx8uueQStLW14eabb8bChQtxxx13oFwuI51OY3h4GBaLBXV1dVIscGRkRGogeDweDA0NoVQqIRwOw2g0oqenBwaDAY2Njcjlcujv70elUkE4HIbD4UAul8PQ0BCcTqccjLNYLHjzzTdx5513wu12IxgM4pRTTpFDhCoqlQri8TgSiQScTifC4bAMKlZzDwaDsNvtiEajcqiQFdFDoZBkCtKEovFJhUoA27dvxxNPPIHu7m4sWLAAl112GcLhMF566SU8/vjjyGQyWLx4MS677DIEAoGqcc3PyeVyePPNN/Hkk09icHAQS5YswcaNGxGPx/H0009LQcJ8Po+lS5di/fr1ci1q8CyVSti9ezeee+45+d5TTz0VBoMBzz77LF544QWMjo7irLPOwvr162G321EoFDA4OIinn34aGzduRG1tLbLZLJ599lk8+eSTMBqNWL58OTZt2iT1dg5HIcvn83j++efxyCOPoFQqYcmSJfj
<base64-encoded PNG data elided>\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {
+ "tags": [],
+ "needs_background": "light"
+ }
+ },
+ {
+ "output_type": "display_data",
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAA1wAAAFDCAYAAAAu+g+jAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOy9ebRt61nW+X5r733uTSAhIQQxCpQIisoQG9oaKtIZE0mB1qgAgSDGLthCQEjRd1I0UqBVUgpaAhFBEUrpOwUdZQlosAFFHQgEpAmQGJKQ3HN2M+uPtZ69f+vZzzf3PvfsS+49533H2GOvNefXvP3zzXd+c66xLEs1NTU1NTU1NTU1NTU13Txt3tQMNDU1NTU1NTU1NTU13a/UF1xNTU1NTU1NTU1NTU2PEfUFV1NTU1NTU1NTU1NT02NEfcHV1NTU1NTU1NTU1NT0GFFfcDU1NTU1NTU1NTU1NT1G1BdcTU1NTU1NTU1NTU1NjxH1BVdTU1NTU1NTU1NTU9NjRH3B1fS4pjHGT44x3jjGeD3+nvUox/roMcb/e8P8ffQY43TH12vHGP9ujPFBV/R56hjjS8cYP7Xr9193399qd/4nxxi/MMZ4M/T5E2OM78P3ZYzxw2OMDY597hjjK29SvqampqamxweNMV4wxvjXO9z4uTHGt48xfs8NjPuVY4zPvSEerxxrh1+/spPjZ8YY//sY4+CKPlPZxxifuRvz+Wh/uDv2P4CvZYzxHmjzjmOM/jHapl8V6guupicCPW9ZljfH38++KZgYYxxOTv3LZVnevKqeVlVfVlVfN8Z42mSMW1X1T6rqt1XVH6yqp1bVe1fVq6rqPdD0oKr+4hUsPauqPuzaAjQ1NTU1PSFpjPGSqvrSqvq8qvo1VfV2tcWbD35T8nUP9K473HyfqvrQqnrRrOE1ZX91VX3WFRdur66qG7mwbGq6W+oLrqYnHI0xnj7G+JYxxi+OMf777vOvx/mPHmP8+BjjdWOMnxhjfMQY47dU1d+oqvfeVches2v70Bjjr+zuNr1yjPE3xhhP2p37/WOM/zbG+KQxxs9X1d9Z42tZlrOqellVvVlVvdOk2UfVFiz+8LIs/3FZlrNlWX5hWZbPWZbl29Dui6rqE2YXbjv6wtoCzOxCsKmpqanpCU5jjLeoqs+uqj+7LMs3LsvyK8uyHC/L8s3LsvylXZuHdjslfnb396VjjId254RlH7/bPfFzY4w/tjv3p6rqI6rqE3fY+M27488aY3zDDmd/YozxF3bH33I31vN23998jPFjY4yPmo21Rsuy/FhV/Yuq+h2PVvYdfUdV3amqj1yZ7quq6rePMd7nKr6amm6a+oKr6YlIm9pe/Lx9bS9e3lhV/2dV1W4b3l+rqucsy/KUqvofq+rfLsvyo1X14trdjVqWRRcyn19Vv6m2yf4dq+rXVdWnY663qaq33M31p9aY2lXW/lhVHVfVKybNPqCqvmNZltdfIeO/rqrvq6pPWGnzjVX12qr66CvGampqamp64tJ7V9XDVfX/rLT5lKp6r9pi2bvWdsfEp+L821TVW9QW4/54Vf31McbTl2X58qr6mqr6wh02Pm+3Vf2bq+rf7dq/f1V97Bjj2cuyvLq2d6O+Yozx1lX1JbXF2K9OY10l2Bjjnavq91bVj92D7FVVS1V9WlV9xhjjaNLmDbW9S/aXr+KrqemmqS+4mp4I9I/GGK/Z/f2jZVletSzLNyzL8oZlWV5X2+TJitVZVb3LGONJy7L83LIs/yENOsYYtb2I+rhlWV69G+vzan+b3llVfcayLLeXZXnjhL/32t0xe6Sq/kpVfeSyLL8wafuMqvq5a8r96VX158cYz5ycF8B82m6rYlNTU1PT/UfPqKpfWpblZKXNR1TVZ+92TPxiVX1WVb0Q54935493uyleX1W/eTLWu1fVM5dl+exlWe4sy/LjVfUVtcPGZVm+q6q+vrbb459bVX/6Ucj0Q2OMX6mqH61tcfHLJu2uI3vt+PqmqvrFqvoTK83+ZlW93RjjOXfHblPTvVFfcDU9EehDlmV52u7vQ8YYTx5j/M0xxivGGK+tqn9eVU8bYxwsy/Irtd0P/uKq+rkxxrfuKmiJnllVT66ql+uCrrbbEniB84vLsjxyBX/fv7tj9vSq+qbaVutqjPF2Ay/72LV9VVX92usIvSzLj1TVt1TVS1fafFtV/bd6dIDX1NTU1PT4p1dV1VtdsX38WbW/s+IVu2PnY9hFyxuq6s0nY719VT0Lhc7XVNUn1/b5KdGXV9W7VNVXLsvyqmvKQfpdu/k/tKres7Zb8Wv3Mgzh5kfU9WQnfWpt7/Y9nE4uy3K7qj5n99fU9KtGfcHV9ESkj69tZe49l2V5alX9vt3xUVW1LMt3LsvygbW9sPlPta3MVW3vCJF+qbbbEX8bLujeYvcgb036TGm3TfBjquqFY4zfuSzLT/FlH7tm31NVzx54A+EV9BlV9Sdru61jRp9SWzB88nV5bWpqamp6wtC/rKrbVfUhK21+trYXSqK32x27DjnO/XRV/QRw8WnLsjxlWZbnVp1vn//yqvrqqvozY4x3XBlrPumW/kFt5fv03bHnADe/pq4nO8f87tpuT/wzK83+Tm1fcvVHrstrU9O9Ul9wNT0R6Sm1vVB6zRjjLWt7UVJVVWOMXzPG+ODdBc3t2m6bONudfmVV/Xptv9u95OIrqupLdnvRa4zx68YYz360jO32t/+t2n8OjPSy2oLZN4wx3nmMsRljPGOM8cljjOeG8X6sqv5+Vf2FlTm/r6p+pKr+6KPlu6mpqanp8UnLsvxybTHlr48xtMvjaIzxnDHGF+6afW1VfeoY45lj+xMjn15Vf/eaU7yyqt4B33+wql63e2HUk8YYB2OMdxljvPvu/CfX9sLqRbV9wdNXj4u3A/pY16HPr6o/OcZ4Gz9xTdmdPqWqPnE22e5O32dU1SfdJZ9NTY+a+oKr6YlIX1pVT6rtHarvr+02QNGmql5S28req2v7bNfH7M7906r6D1X182OMX9od+6TaVsO+f7c98Xtqvq/9bvh77hjjt/uJ3XaGD6jtnbfvru1LL36wqt6qqn5gMt5n1267xQp9am1f7tHU1NTUdJ/RsixfXFts+9TaPqf001X156rqH+2afG5tX7b076vqh6vqh+r6r0D/21X1W/Gc9GlVfVBtX8DxE7XF2r9VVW8xxvjdOz4+atfuC2p78fXSNNY1Zfvh2j4a8Jcm56+S3dv/i9ri6hp9bV3/eeqmpnumsSz9m29NTU1NTU1NTU1NTU2PBfUdrqampqampqampqampseI+oKrqampqampqampqanpMaK+4GpqampqampqampqanqMqC+4mpqampqampqampqaHiO67g/JTWmMsYwxalmWGmOcH+fLONJx9QnjXeo/mffKdmmOGS+J5+u8UITjXYefNM9V/dJc0neSbzb+jFe3X2p7XXKdXtX20cwz63cvfN8r/WrO/aaU81eDZn7J81f5Muk68T2LQz8
+8zsf8zr5w9un+a86dzdtruLBx0rj7dr/0rIsz6yma1FjZGPkbMzrtG2MfHzP9aagxsjL7Wbn7qbNVTz4WLPxlmWJznetC64xxnsty/L9M2YODg5qs9lMFbNmtPT99PS0NptNbTaXb8Alo6q9J8SDg+3PQpydnV2aS23Vzx307Ows8uYOrmNqx7n4WbJQJ1ctbNjf52E7n9dBIckxA3vXtY/h4/gxyiWZxZuOadzkM5RHxw4ODur09DTyeHBwsNfW9cPxvR39S+eSXdhuNrb6uKzUSepLmTXO2dnZpT4+FsehX/rxWQz6fMk/fV71lZwen2mONZ+TzjWmbOz+Rv8/PT3dswnnmfl0so3zk3xVx5Pd6L9nZ2d1cHAQF2Uzmx8cHER7ed7RPDp+t35A/XmeoI3pL/7f25+cnLyims5pDR935xsjGyMbIxsjGyMfAIw8OTmpGV3rgmsNTDxY9V3KdWFJNJQUJidl0tF5jeMCCtB0nuBCfqpqj0c6bQIakbehcegEnngPDw9jMOv70dHReXCImDjFM0myucHlnK5Pl4HfHQASj55M/byPOwuSBDYOzg6gPgb7nZ6eXrLRLHgJbrOEzeSk/wcHB3VycjL1h0QaJy1SNL/sqzaeINxXvZ/6SgeuF/lD4tn5WpalDg8P6+TkJC5EaCe3RUo2bivGNWOU5DHutne7Sh+zxM+8MgMZBzGPK/d5go3zRHsxaSuPESTUhz6gvoeHh5cWWMwBHiNuIwd+l/mq2PN+vvDzXNW0pTV83J2vqsbIxsgL/hsjGyMbI+9PjFyje/4drjHGQqV5MvNgkmNdJzh53K866Zh0VB1L4zABUcFuhFTNYHt9T0nS2/EK3/nxsSlfSo5yJOmaOl7jzatPHgzuWDzORCM906Ye5HS+lDh1znXs31Py0ndV7BxQZu3T4oNg63OlSs5a4vQ51ebk5GTPN70SRJuyvydJJu+kTyZQTxgaj/FJn0+Lq+R3DvTyAyY217H7h857bkgypKoZ9ZAWbySvnFPHs8o89ZlAZ7bQ4THZnX7kvni3/kXyfMJ86McJAFzkreW3WYWb+j45OXn5sizvdknpTZEaIxsjRY2RjZGNkfc3Ru4u6GPCvolnuOrw8HDvyk6AQSZ1nAyKZs6j/nSaFAh09BRUHpxOXnVMFQ7KkvjkWO5ss/lTMPic3oZ6YsVlTV+edNQuJUDaQoHB8VTlIG/6zCR0enp6XkGi/M6nKFUkvY/O8ba6B5wDiSdWb+f2W5btLWH3Ubanb3sQOk+Hh4d7yY+AlRYZvjAg70k3zoePkT7z+0yH7gezGHA/c3CgzhJQXSUD5xQpGbLynfiirMmnZgsY58vPEZw5p2RT1d7lnfHhvpji132AuvLFoi/sxbP++4KQ/bjIcl1S5qbrU2NkY6Q+N0Y2RjZG3t8YuZZL7/mCSwySMSasFKzpynHN0USeKPy2qI/J/+LLk56PTVDyJOgJeBYUyUGSw8h4Oja7qp6N4wHH4He+uZ1iZhe2d73ovAdRCgJ99qA7PDysO3fuTG2cQJGVnnQ7OFXEyDeT/bJcVDlU+WNlp6oiAKag0+ekF1YneYyUgtKBxo/PiLzP/FtjcjuO+FQ1lLJQhzNenTzu3WdnsTMDiyR3AkC3caqa8fsMrDxR65h0xPzBvgTPpBvO7zmLPM0AxeVdywUOCL4wvsq31Yf+6/m96e6pMXJ/nMbIxsjGyMZI19X9jpH3fMHlwT/77Ey743Ksqv3bewoAJheRBzCPMTlr/Jlz0ihUYHKSVNVIxhcvMwB0HjwAScmYDhAiGn8WUASw2UJgDTS8LY+zv8afVW7p7NR5CiLNIX9wGTUP+6StCDPfTFUPzuuUkiD3KntCJJjRz8jLTDfuQ26jGa+uB/LlMlB+jyf3ac7jPKbz7g8cz2/1s7qUgCD5Eo950mdy9IWIL5zIb4pVLcz8WNVFxZp2cPCbgZhv/UmLCfVPlVDq1/VKXXkMq6+P5X29TdP1qTGyMbIx8kLvjZGNkbTD/YaRa3Qjv8O12Wziw35ikMmCCvYqCIOEt+zoqLxFSgX4YmAWAE4JSLzvVf04/kxe14lk5W1MT878zMQh3ek/r9g9qFMiXUvU6kfn9+pNSnqeQBw0XD8iOr9/9uqpjskHZkmLzp/AlG/LOT093UuamiPJTd2wnfNA3/UEJdmS7nwc9xOfl3FSdfFga1qscEz+pQe4PVkn8HLbcHwec735MfHNOFhL+C5XinvndS1HpBilHhjLbu8ZiF6VP9xHkk5miwSPWc+hvqic6YZEOT0/+F/To6PGyMbIxsjGyMbIBxsjb2RLoYKSQqXEnAI7CezJwa9WfRwSz+k2sPPK/35lyluE5McV7POlsa+jt7Vkrs+uk9R/tmfb21ftP9hIG/htUuqe5Lag7tYA/ir7ab5U0fWKKYPceWA/8uCgymOu85ktU1B5RYrgTnu4r8+qlZRTFcGkO48P8rtmW8rCqtZsMZIAJlHyB6cEruk8dZSq5WnetGDxXENbzbZkcO7EX7JhWsCkbS/O02y8RMm2qZKX5qO8PDbLZzM9Nz06aozMY19Hb42R++M2RjZGum4bIyv2e1Ng5Fpuu5E7XO6IbqCkHA9yOoO3W7s9nByJ47vCPFBSNYhOxLFnFSmdS0k38eWJxYGXY7tDJ/ldnz5u0pPrgklYDqpAnm1p8eTMJDKrurDqSFBwmagPT7hukxnYJ1npF0wCGstlnMmdbOXzEJQ9SVP2teSR7O+/Z6Hvkif5aZI/bQNi++SfVXUpgc3iN/nfjDQXX9Wb7OjtZ3kjyU57E/g5ZrrDkPyQ40snzuta4ha4syq4BrBr53WOvqz99CR/ZfJMV8nvyEfT3VNjZGNkY+TleRojGyMp+/2AkWt0I28pTElT5N+TsqVYHzP19eNuJK9kuBN4Vc4/kzcf28dLlUHK4smDx3i7NDm1jqWAcqfyK/5UqUkO6WNzTh+TbVOFIYHMLBkxkGkXgkzSlc/nAe9zup58bE/wXg3hmKqicQyv2vnnzWaz+tsdyTZJf6yceUKa+SbHTvqexeUMdN1HyTcrvg5MvshM8ife+T0tOtJ38uXbCFw218mMH2/vuuI5+kOaz8eaAfHM/91+SY/0mTV9u85m45Ck16t+a6Rpnxoj9/s0RjZGNkY2Rqb5fKwnIkb6WKQb2VLoE64ZnUy5k3iS4pthZkKkYErGXgOSWXD7PH5r3pON88G+7OeJ09umY/w+CwS2mwFRoplMrmPqd+agtKsnP+eT43hVZZbsZolyTU4mNpH7Gv2DfTSnqmIOYm6vlIxnssy2onDO5Kdpe8R1aI1njcOH72eg5jamnsirg+0a4PkYVft2c19aA2SPe573udPCjzTT71U5w3U0Axg/PsY4/6V6r6bO5kh8JVC+Coxm8nucNN0bNUY2RjZGNkY2Rt6fGLlGN/KWwhSYKSm406hamhxEzkhjzEAqGYXEStbsan0GSuLDA5tjpsQxA4AENvrsb3BJyYbHGAh+PFXunA/XB3WYqpCiWWVEfTzR6BzfoqX+Tu4zaS7X2yzg6T
deWXE9pTfgeAJy/5vpWOe5JcbndUDyeV1OJZmUvGexQL4YR/SVqxJ5OjdLRg7Is7j2PtQJx2dVUDylirnOcf60OPJcwrs1zFneZ8a762Lm17QT+1CelOSTz6Yqv5PbcWZvl2Ems3iijZquT42RjZHepzGyMZJ9GiMfDIy852e4kjPKEXReiqaRCBQ0ApWXBGSy4vwzA/K7BwjH9j/y6Q6cHNSrT6k92yT5kizps7df0wOP0aH4p2PUjweyjzcDGpfPdZr0p/m8OqRjHoyegGeLiWQ/Bz9+529vcGzKwOBPwTVb4NAHZr7r391fyFO6hb7mV4eHh3vbQcivfrjzKt7ID8+rny9iElAmX1+LVcb7mr7TImTN/uTDAS35K+MjLRLcj7lgSHHqwJMq+eSTb6miH3pb8pYWBFeBj8ea870GJk2ZGiMbI13WxsjGyGSnxsgnPkau0Y1sKaSDcUvBLLmyyuOBkW6LpkSZAtUdlQr3YykZk2+1TYlqWeZvd/JE7bJ4xSAd97FTwp9Vhhj0yeF8TPKuP83vtkv6umpsAgOrea5/By5PVmpDAOc4rA7yNa70M09ibvNZxcoDne1pA8k5AxLXyVUBq3lOT0/3Fl3kieDgCVI+wOOudwcD8ubyui+4XzsvlCnpl/Z0X3B5aPfEm/PpwMJtIM5/+u52EPlvyDjA0SecH+o/5TfFnVcpPTZSwk++KD5ncqktP88WiSnXNF2fGiNrb8zGyMbIxsjGyPsRI9fonu9wMeD2Bp5U5DzIyLw7NZWvY1JsSj4cwwOLfM0UM8bFD7apTXqTCc+LX43tQKIxE8DNqmOpsunnUoLises82J4cWsdpp+S8s/GoD9qE/T0JMRF7EuJ4HvzJ8ZMc0qO/qYh9+Fe1v9XCg5Z6Sf7l4yaeeWzmY9LH4eHhnjypf9KngJX6pR69Ouj/1/TDMeivTglcGAvuK1fp3Ntye9HMJprfkzz1xwXKjI9lWfZ+x4VVT84neyZ+HPiZG2Y6TzHhudB1K/36+O4HblPeiUgLgqa7p8bIxkj2a4xsjPS+bN8Yef9i5I29NIOMzZRHpyZApCTD8VgVSNUKv3LlmInSL11X1XmVhJUmAlPVftUkBaobQuO6I7OP5KIO1gDMkxJ1kBIuHZ7zc17qXXpg0NHR9MBisq2O+YOlOq594LQng9gBieCeEhZ5ODo6Oud/RtSRBwz9LSUlzq9nCcgXfYKLHy5sXF9e7SPRBp4MyTPHdt+d6SKBt4/v5xyENJ+O6buP5f6WANd9aC1+vZ239e8CLdnLt1DMZPYY8LZeqfOtKGzvviWifZxHX4gQRFxO+hj9zIGDPPudAPdPxbDmb7o3aoxsjGyMbIxsjHxwMfJGLrgUSDSulEImuB2ACpBAvM3tAag2XuHjrUVPRu6UDgKJNKfGpDE8CDwxyUkVbHQcOgeTUHJW50fjezsm3ATe4scT8dnZ2XlFKAUW5fUKGfXqwJ5Ayucln/whULahvB7kfp5jCuSYoFOAUue0G8cXOZioYsPj3N7CuVPVMiU/b+OLMV+MpD5pHtfRLNFwe4zP4VV4P08d+ULGx2FspHFmD+dqb/bR0dGeLghkqZKbQCXFkdslnZ8dc3/SPDMbp+O0dbKvxvQqps7zRQKuY47r/uB+4QsWl2u26Gm6mhojGyMbIxsjGyMv5nkQMfJGLrhSIPqvuut2NZnkd45Fpj3JEEx4dU0FMzlRIZvNpg4PD8+Tjs7P5lgL+rTtYk3ZvKXtgelOdNWihvoTpb3t1LHzyIdfU8VRbRmoCurDw8Nz+b3KKd5PT0/PAUtER6fcmufk5CT+YjyBzBMlE3d6oDfx51WxmS3Vz23idvYx+N2TVErwPq+Sg9ql2/gcjz5Jopzc1pBARRXb2dYNTzZJV25fPYScEmSShdsWfC6Nxflc9rS4oV5m/RNQJrt7HPG5gZk9nVJF1he4rhfqi/3cn1Mf10UCRm/r8rr+mu6eGiPrXNbGyMbIxsjGyBk90TEy2f98jLWT16ExxuJJw5ny5GH9rz0XA0J9kxKoXF71zhJHkGkqhycNH2vNwEoO/E7HuE4Q++1OfXZndsdyh3IgODg42ANZBxImjDSP8+5VzJl+09hcMHBsd3TfrsAAm/mZ/IdbXWakZHEdnSZi+6QPT6gzP+A49OlkA/WdPfya5qZuRK5b91WRQJwJ1RM75ZglQV8kul7Ic6o0ug/xmNtK81HeZAsS8wjHTAvR1N/7LMu2Enx4eHiug3T3Yub7LtcaQIg/f9EA/UZ69Ype0vXp6enLl2V5tyhg0yVqjGyMFC+kxsjGyMbIy/REx8idX0Wnv9EfPk7JJxkoKcirCzxPZSUH1f8ZsHjQeN/ZcU/cVZf3V7uDkhwwOI6T2h4cHNTx8fG0bQJpv6p250qV6Vlg+L5rBw6vis7O0+6zNzqpjwJJ7VIS94TmoHB6enp+O11zyvndvr644H57zsEHcZOvEBgoN/XjNqHfuF78/CyJ+meRqqscw8GLdlcl2xdYmpvx7Isz8ueVXPojk6fz5fpyuR1kvBJNSsn0KrCX7T22Z+Ol/gRIblmgnB5/kuHw8DDmiGSLlAvJh4+dgI62FK98la78I9n2Kl02XY8aIxsjGyMbIxsjH0yMvNE7XExubvh0NU+nleN6MHilIiXONUVXXXYqT06axxOD5mPbVB2gsf1hYwYDjUieXTdK6ikQKbOOcWuKVyXcGZJ+fH7XKROX656y+PYX9WXC98TJ8RiUlO/sbLtFwx9uTYuTBFjkN53nPDO+krz0Xwdvr/bMwJ0JR/x7JUz+Ix2zrdtaCZK2pl87GJFPf9DbeffEpjFnixXqk1tZrloAelyt5Y2UrF3Ha4uyZFN+F7++iOQ8OjartHk/10fin7wTxKkTt0PiyeWdAQ/jkrmYzwRozJOTk77DdRfUGNkY2RjZGOnUGHkxz/2EkbtnGB+bO1xUlP7rqpTBr3aeCCUMGZbSuEeX4yej8pxu/dOhNU8yDgPXae2YJ2mONXPK5FzuhN7Gg4afN5vN+S1X17UHPvVN/ma69KTp5/27V1/JNxcMM5rZlQnUQSVVIWcJgmPqnFcK5Qfyx2XZvtlJ83sgexCL/NY3kyr90H2e7VnR1nnt4aefsx+3vLiONI6qw+4D7odexeVCjqCkOdSOsayFQFp4JdvomIMndc4FB+dOunNQXQMEj2vqgtXZBIAcR5TiLyXsNGbykdmi0r8TbBN5VdbnF098i5pXp5uuT42RjZGNkXUuv6gxsjHyQcPIPNujIA8qBjyZoWO4IvzNSBSYf6T0nYmA88tJGQipkpLGZZvZ+RlgOoiwujNzvlT58aBQoDHR+YOF+lMyT4mWf9S7aLO5+H0OjeHHyOusGrumY81JeTyB++KAOktBkexFHY8x6ujoaE9W+U/St+uEfSgbk4DaSp6jo6M9Ha7pgmN41VoxlsCT5LpnNdVBdCYfk7X8zscgKFP+ZVnq+Pj4ki+6j7pdku7Zl4sB8u/jeBL0hOs2lJ14nFVP2kLxxnY6rz6e0+inihPynQAi2YZ/3iYtdsTHG
GNvAcJ+1K0v4KmjpkdHjZGNkRqvMbIxsjHywcPIe77DReMdHh7W8fHxJaV40vQqjzPtzNPZUqVL/z2x+RWqfxZvDHbyorbp+FU6mRFB1vfDig8Gr+RIMvCVsTqXEgz1l5KdH09ycs5kNw9Wjs2Km/PKoHMQU2Inv6kqw3nX7OD+pP6p+iO9MzHQNu5HqbLqOmPlzPUn8mQ5S36uL1UcOaZXqLmVZ1aNoRyuf5K/ZlVtPXmmyjP/vCKYkjl1RXClHyVQdN1TPo7vQESAW4v/FGNc9JAPtUsV7DS/L6TYhv5PXn0MLoy0hUY2cXv6mKRZzDVdjxojs05m1BjZGElqjGyMfCJh5BrdyJbCqv0kRkf1pJ+SGdu6wA5IHtQpwWk8Koi88TudRf1mju68JkBMSc2TQ0oWnOP4+HgP5NxJKQP1k2wzA4eUQN3JeauadvE5POG57bgvnf306tyUaDQO+R14akUAACAASURBVGJ1iPrywFuWZS/ZJqBKvjmbj4nK/czBmjrScT5YzHl1y9/no2z007RQSr6V+JuBsPQs0OCYAvkUp5R3jG0VlFUhtk2JyGPPE6aOpSqS64V9PFGmCjl17Dbxc+TD/Zuy+NhpPtcLj/MNaCmW1U929PzDWGB7r36SuNjQeeZTblNa46tpnRojGyMbIxsjGyMfDIz05yhJN/bDx1X7bydKBvIASE7jgUaj0TE5Btt6guD4dGAHNQ8c7+8gkwAlAafL6Q+7sh35SsnEExvPE2RYqWF7yutA4kHlY/P3H7y64mCUqqazaoZvq0nJzp1epODz36ZIwbpWNdFnykmdif+0CHCQTYuMqvkPmjoIpoWI2vOYA5fkT230nRVUnpN+qWf9pcWD4pwVvLQg8YUP5Ug6cv0lcEwV7jUQdV648HRda6xULff2lJW/W+QJOcno83keY9JmTKd8mPzFbT8j2dYXlvIFAklV/t2gputRY2RjZGNkY2Rj5P2PkWt0I+iZkiqZSxUnBxqORcbppCkJpQToVZhUIdBxHnOwYyKeBQGTlCe8WQLgODrGBOwVxuRAarfZXLy205Ow5k1ypURPsPOKC53bZUoOnPSVEo6P4YCtOVNQziqW1CPlm/GjP4KRt/c+TrSJJ+6qXKmb6dPHFa29wSol7Jk+XV8pLhM/mpcLorQoof5Tm1S9d1kpL2ltuwP55n8HfV94sC11or7kNS1KCMauP8rlcyTbVV1sbfCFjW9pmPWnbmbyeC50APTxOV7T3VNjZGNkY2RjZGPkg42R97xHZDaBC+gB7W08qNMca0G81iYpyPulQErJWe08OBMPlMfH83H9Vbnko+pir7Qf87asMnmCngW+Jx0mPHfgmW7ES0r8s6pZ4mFZtm8RShUU1zGBeE23/Oztkh/wnM/v/Vwet5PfYl5LVK6HWXK/js/SJtSl+2OqirqeHACp9xnQq522UblsPg/5TDwkW/KzxuUYPh7t6YtTxjWPue/7ApR9ycMM4LzPDFCV3L1q5zLOEr3HWAJLn585KIGJ/Lvp7qgxsi71b4xsjNT3xsjGyCTLExUj1+jG9odo8tmVeVVOYjyeiA7sCkngn5Kmnyc/yfDuaOKDMjlfKbA5D3m7yiipH+dKAZkqh+nWMHlN5+ionE96cV24TaiPBBpJviSrk8/Dz+KbixA9hOzB77yx38w/yV/am+w2TYE9i4mUvBLp9aOUWUDlfjFLMJ4gNT9lo05SzBEcfCtJkn8WW+5DikUCXQJwX2hyHFbQZj5HvlL+WUvwa/Zxv0l5cAYyyX9cx85j8kP+d39iHNDnnBfO5WOlannT9akxsjGyMfKyXGzTGNkYeT9j5I1ccM0UkZKJb10Q43xoMzn9mhFdaM5PBSa+vL+PeVVCIYi6A8/ALB1PBpS+nE+Ocx0wcxskHSZHYSWQeknjkAeXb7aNxIOOOpgBHbcdUOfkX0DqyUXn0qIgycFz5JMBrrHSA7UpySVbePsEnEmGqxYmvu0mLRTcPowX8kn5UyJ226k/beagl/igDasu73H3hJx053YigHHsBL5ug+TrKfnP/FY02yLkeqZ8M9lm21CcRx9jzVdSHx6bVXmbrkeNkY2RjZGNkY2R9z9GruHkPZcskwI9WbBtYl6f/dagjjtIVO1XPRyEnIeZQ/B8auNbPNxhGIQ+d3KelCycJ97iXtNVOjZzrpkuPFEnGXn7P82vP7Vbc7YE0Gv9kkwpuDgW+6UtOikZkoc0hut7pkMHECZOJoGZLmYA6ny4vLPFnPO82Vz8LkxaoKTtBW5r7+cPGUtGzuVjuzxpXunQ5fV8kOLP9Zn0c3Z2dol3p9l47ifkL+Uw9p/pngBDIBTRd5ZlufRjnu6v7pcE4xk/LmOKuXTHpGmdGiMbIxsjGyMbIx8MjFyjG9kjwh9RFLNueCZg/jaBjzEb29ufCxCqCvxzx6XSqKgEbPqckvNVYMV2s6ARedLj2F7RYP+r9gavJcMkr1dWk9NfJ3jFG8eWnMk2aR71nSX9qn2/Y9u0wJkFDe3nn6n/q8DOdZ582/3I+SNtNvkH+6hrypgWBE5K7jN/88+cx2PM56Gf+o+q+sIvJTrK4fNpDLep68O/e7sU6w7InM+J/sp5UnzoXKroruWbNI63mS2SNM8s5l3WWWy4XPzf2wofHTVGNkaqH3nj2JKzMbIxsjHyiYuRa3SjP3zswcg2ZNDBJRlUfRhQOu57Z1PymO2dlbLXFDPrS/K5ZwHPMT2oNA8dLo0/IyaGFEyJB8k1m096ld5nFW3phr9D4DZPY7rdOL8HAD+7c+utUyL+cGSyn/ul+5H4S7LMgopt/PcYXAecy6u+Igeu5LvkPcXNDKw0ntuTY/qPPnoy9wWCAE/jcAG52Wz2fkjQt0w4D+4rbDt7FbXbYG085g61SYk//Wgpcxxj1n3ceeBCKbVNtvW3bHGumf7Sdhf6hMfyVVsfkv6SjE3Xo8bIxsjGyMbIxsjGyBspV4phFyYlWg82rzg4w6z0qc3s6jUlDD/G4NCYsyoYgYxzqt/MIB4M4oV6cj5dN/quSojfBp195vxOazpKSYjVsgQqa0mMYEOdi9JWk7SAIDEo/NfaOZZXiFzWpK80T1WuKnul0/XnFdc1H5vFiB5m9m0XTDaUw/lJ86jNwcHBpa0bBOAUM6mPAwhl9fZeGabNxrj4gU+Oo/HpN0mHiWa+5MAy8yFP9G5Dl1t902tqfVHjOkl6dRlcfl8kiAcHMPePlGsY5y7PVQDSdD1qjLysj8bIxkjN2RjZGPkgYOSN7Q9JRp4loZRYPbjlWL4VIwnOPlW5SqE2biBXWkrwcjyO4f1mCYo6oDy+NScldCXMNJfrLOnRE6vLvpbkqi6cdAb0nCcB6+np6fmP3XllS/OTT+fDA0t7cv2X3avq0i+Qi28H4jUwpp3U//DwMAIbeWQlLcnGHz5M+vLFkQMMPytJ++KGMnAM9ifwJSBINqR9+EYrVbq8r/jy373hOfeFs7OzOjk5ufS7Qj6/f3afpH75uljG7PHxcVwgST63IX3BdeJtqW/PQV5p5bgOxj4PXwHtvHFM
n0vVVY7rix6PMV+4MO/eDbA0XabGyMbIxsgsW2NkY+SDgJE39pZCr8z41eJ1+nkQsL8Hg4+b5lmrGIhPb8fv5HNNDlWTGJwzsOI8MycQvx68zg91w3nd0bzapTF9bH9FLtvPtpCsOVly+JnjOm8cg4nLKyFOaRHgt4cdoLnVYiYLfzGevu3+6reudSz5uuuDvDHZ8VfaXT+0OSn5NEFE3wl2adGQ9EFZ1Neriu5fTOApxsmPdOiVKG2/WAN26lxjy3ZcpPrcvvWDfCRK20SS7WeAlGLY/dJjgzw7KKVFFOOGpLeF0UaznER5+6Lr0VNjZGNkosbIxsjGyPsLI9foxn6HiwlVzLoQfF1pAh8JyN9SEM2U6uf9VrfO0aAOSCmIRFcZUvz7WDP+/NyMFzpoGsv1mRxR46QqKXXix10etqu6SFQpsc0ck680TpW6qsu/mk6bpURAfg8PD88TDuWl/AQV7pf2IGGCVT9PBLMFj+tQAU3Q5vjUhYOSJwjJ5uPTdlw46Hha2I0xzn+3ZJYI1xIa7eGycQxW0Jx8wSO50zg8lwAv8ee8Uc++IEiLpRQfDp46xn5J5qTfdC7JzJzo/Li8Kc/QVgmsyYMDXNPNUWNkY2RjZGNkY+SDi5E3dsHFqzuvNOl/YjQ5iAeuiEmTAOAK9MqFlKkKSAIPVz6rNeJTAa125H9mxJlB6Iz8L74YqAnomAQJGg4CM10m3jm22qcrfZffnXQGmvxPHp0f+kaqGLAPQYHg6/rkOfc18UwdEnRErEbR75xmdvUkw8/0XfcB2pny+Xy++PBFANtyLiZXjpMSjY/jCYljHB4e7lV3PYlzHLc9/UixwjmTHmb6SwsxBy3+RkwCUR5jnqNMqbK9lgNc32kxSP4ddFw+2ly+kgDOfZ9yuK5db3cLME0X1BjZGNkYua/nxsjGyPsNI9foRn/4eKYEMk7BKezBwcH5/mON4UCUEtRasuU4XiXxW7F+29+dlQZIidp1oT4pmMcYMUm546REI105v9427YP3AEsg5nJSnrTlwuXSPExOM53Mgszty2Q/S8QMFufb/YaflfC8AkpeGXQu7yzxsFp5dHR0vi9adpWO3M/ULwGJKpTUv/usJ9+k61Q99aSetjok2ydwJQh4AvM3dnk+0Piqekom+R63jaSql/PlCy7KOas+J91zXC1gUjKn/B6TSU8zX5OMOpdk1Jh8eDxV4ahf2YUAwvyX4oY8XAdUmi5TY+RlXahPY2RjZGNkY6TmfKJj5Brd8wWXGOLkqeLibdifVTWRlOHOzOMpwScAc2KwJYdxHnw8Opxk0znf482xXS909pQUNJ7LmJKFjykevd9VgJ8qsLMKjSfwlKSoxzSX+uvWvSfopEfawgNP43ilznWmOXzfuwc+tzqQL+qYclZdbNPRuTt37uzJyX3WPmbyFy4Q9F1ju1ybzWYPyJI+XZYZCWSZ5MmPxvIKHSlV5ZQA0zYSzk1K1W73PY8hPt+hfgmovHLpC6FZkmc8kdzHNC51lBaZ1I+DTlroeUwlgOX4Htv0eT4MnaqYs9huupoaIxsjGyNrj8/GyNo7z+ONkfcvRo41Z7oObTabhQ8UJiXq3CzhUEheYbLfVcr0K2k3oh6qXJb9N+KoffrF71mwJdCh083kZn/yzXGYdDxAfE6CkcZhoDAZrzkCz/t4SZ9Mugnk06LC7e9gwvkYFBqzqs6rV57kkv45r3hxvlgNq9omBz4c7GO4HnjMwYjHyZf7FPc7u544DsFvVpElud0Sf7OFEnVUte8LBAMfi2P4Z43pyd1p5qsJXMl3WgRpMZCqrmrrCZl9k840xsHBQR0fH+/x4z6ZZPC4EflCIi2kZoA9i3Pqn/3ddt5nTYadTl++LMu7XTrZFKkxsjGyMbIxsjHywcDIXQ6NyeSeL7jGGMvR0dE5w6rErd3Sd8YdFNTWwYeJxqsGHF+Ku+q2cUoC7rTs6wlJt03XnMQTIilVFqQDVoA8Aakfkyv1meQgP9TRLHk5zRYCKbmKb4Ijbc3Kl1dlZqC0Bm6aT211nrzOEofzp4TtSdoTticwrybznOaVv+jZBwcZze3+roWQPwDuciYbpgVX0iN1z36yk/tf1UWllHxwLsUn53I7rC0A0uLM7etyEihYVV/jjb5G8tzgc1zli97HeVF7+kWqdjKuvVLqseU+yuc3eHwNnDwfeCyfnJz0BdddUGNkYyTHFTVGNkY2Rt5/GLmLjXjBdSNbCikwHcwNmhzYE54HOxXmb83xtiIpy39Z3YGHCZkKdsBJ86ja52DH+SnXTHezhOe/JSE+WNniLV+XXzw4AHhbBgz7sMLpenH5UlAx2Y4xpsDrffk2G09mDsr0NenP/cJtIt0ycTu/vkd8Zjvpnn/k2dvrHHXLRYknO/VL1RaPOwKuy+7HaANureBY1NtsoeFVVi5MmCir6lJFOdmV85A2m83eb8jM5qWePS69zVUg4TYk2LsuHKxTXks50PXtfiAeUq7juGyTYtkBZ0a+KPHqONs0XZ8aIxsjGyMbIxsjGyPza0weBcmAbsS14HKDss85g3blmMbieOpL5ep86pOOeeXIAYXzJ14oA3mQfvx46kd+UtJlVc/PaawZfx5Yqj6tVf40jwedJzoRK3SuC87NB6MTn06UV9tf9N3fHkMdSF+sIHIeJgL6kPyI/iQ9EUzOzs7OH/rl2J48+McFA+eYJQ6Xlbr2uVJlm0lC/Gic9AYi9x3KS30l27A/EyWJPDqAMRlqP7/bUDy5npL9GYOsgnqszkh25VheaZPPq/rrgOz+R5tQB8neXrXTee7B1/j6EUjaRf998bPWJoHfDPCbrqbGyH1dNEY2RpKHxsjGyPsBI9d0dCNvKdxsLn48zA0pRSigPUnps8bwoPNA0XzqM1OaC514421xJhHxwn5q5wEth/IqGckrZDODuFNfBTyUNQX+bA4PCifpNQEpiUFEXhhsTJTURxpH55igmYg8AJI+KAOPkTf9yX6sDnlQecJwG9I/Dg8P9+YSeYWM4/Az/dOTT6rwJZ2nZOTjs9JH33H9cB7qkfF6enpaR0dHU39JizpPijP/YRvaNS1g0gLIx9G83FeeKpOeJ5JMrm/6kPp4HKQFFsdw8Kcf8C9VqDUXq6Q+HnXs8eQ5kGN7pbTp7qkxsjGSvDRGNkbSno2R9w9GrtE9X3BR4SkBsB0D0Pcl8600Gic5+dq8fi4F0sxo/l1zp73Kasdqhtol3azJ4AHufTQXKYGot6UOElj6Zwbi7C1SDnbXSWheFVt7exTnScHg7ZmkZ+2od467ppMxxt5WjAQgLvssGJ3HWQJLdkjA4H4/k4/+qXHd3xiv7ntp0edJ3x9mJi9pK0/Sd/JxfmaljXx7PNNWzkvSM9vMKvAzHl2HOp/2gmusRNfxcY/9ZP9kX8kqPl03aZEwi0fO13T31BjZGNkY2Rjpum6M3B8r0f2GkTf2DBcd14PPHcWV5sAhcqOznzu5Jz0lXAKUxvK5XGEpaN1InlhT8vXK8EwnrHL5OP7Zk56OuaF5LunIx6Fe6GhKOMmRaJfrOFu
SjfNRR54MyROTZkpqvghJn1lJYXLnIsKThwccE46qSuJlrUJJWbyiTbvwoXq3F3WWFjrJH53Ozs72Ko7+3xeEbgsHMO67T3L73CnxE2wIdmqn34QRzQDBx2J7JwIg+SAgph8Pdd+RPHytsc+Rkr7HDhccHhMz0HVw9Rj2WPdxnZc0p8vbdD1qjGyMbIxsjGyMfDAw0t/gSbqxHz72pJMU4kmcDqjva+CkNn7eFaJzXlFxEEs0C74EHvrsFQWRV38SD0psqVI544V6URB4pYHjz4LKA8OTlgNySlgcPyUatndQ8v4ijkFdpiTm/RLwUlfySa++6TNtQF0lUPLvia9UdWKl1pNqAg62YzVLdnOb0D9S4k3x6HProenkg+m4x6m/LWotbhNopweIl2WJv0eUeHMbkRflBlYdr8oL1CNf8a3++i+9uUxsJ6LtUlWSel07R9n5PQHdml/M9DB7BqXp+tQY2RipMRsjGyOrGiMfRIy88qUZY4z3HGP8mqvacbuAJzsynSoV/ueCudH8fLpSTUpLyUzj6rP2kbKNAoA8a17KmBJ5+kwe6Yi+79Rl9mOeHBIQiRxYk14lv1MCyuTs+k47ux1TAPhCgby4/B4EArxUYUw+mPTDCo3aeNWTyTtVSzWO+jABrFWPPFmsLXh8zOQvXlmeJTZ9ZxWT/szky+NuL86TFk8JNFwePRjNY0m3nlSdj6TfpLuq+TMjM35TPtLDuJ4/0najNG4aU2MQ7NV2lh9mOSj5nee2lA8S0KQxm66Pj1WNkeSP8qbPjZGNkYnHxsjGSI3xeMXIZBvRlXe4lmX5gWu0mSb+1EZEMPHKy3UchOcIQrPjMx5nhlZb54X8pgDheORhJksax+Uh3z7PjBdvW3X5zU1JZ2vgxD6zwJ/Jveb4DrApYV8FeqzM+atJCXBeRVYbJVf3S1+UiFzvXCjNdDKTO7X1/677NMdMp64H9U8VdcrHYw4o6ZY++XTenf8Uk17NJB9Jh7MYSUnadTdLlDru1V99dl9Yu5NAv/Lv7mvkgf9n8qUcm8aQH69tB0z5ltV7n79pS9fBx127xsgdNUY2RjZGNkZyzPsJI9foRrcU6vOakCk5q+/aL8evJe001qzfWtC44tyZ0nGO67fzkyM7ucyuR99G4XLM9D0LqlkiS3K6vhIPyQ7uxMnxqcfUlj6V9opTPwpKvmp0BpLef2aXWYJPidXbp9v87mPiOfX3zzMQSfPwnOZZS7bcquPyzvrNdEbdXle/szZ+zKtkPM4+qZ/z5vPz+Eymqv3nSGTTtF0lgekaCDjN8tGM35R7E4ClnLI2P+e8LqA0ZWqMbIxsjGyMbIx8sDHyRi646BC+15rnPfjcoF7B07FZMvH/MyDS+VlC5WfyMhtTY6VbjzOlr8151ZX12vi8tToLSte5O10CCj9+HbmcLweatcBgO/qP8+t7g2cLgGTv2Xfy4HMxic10kkCWCceTbKoKuS597LXkzPnom25n9yECivPvDyOneX2hkxYaM/koI/lNSdJ9aDaOZHG9zOzkOr1OwvSxUsImeYVzxjd1NdtXnvpdRbQfY2a21SsBcrJJ091RY2RjpFNjZGNkY+T9h5FrdCMb8pMj8bMCSN/dOcS83+JOAeRCzpThYMM5r5Jjljicj7VE604hOjg42HsImsnKtwgk0PHEpM+ab+YEs8TtMsycN803S36zPe7qMwNhVka8opmSIKspfpt9VhH1MZhQ1wA9JQnuT05jejV3llQ1npOS+hqQULZUdUlzeUKUX5Ff+qXHrNq4X8wAZBajlN150RhXVa+TvCm/pL6zWGGc0S9oa6+cSmdqm3Q/44NzzXLUTHdr8lBns6087j+zXDt7s1jT9agxsjGyMbIxsjHy/sfINbrnC66Z0ySwILNqmyoD7pR0MO/HZMLjIvVJtzVnSZGO4bz57X/vT2djIKTbvbO+rjf2n83tAc3zVwWP6y3x5K+6TKCXEgyDy/n05Oe3n3XM+yWeHexSwHmgzsBKRPvLH5IcPqb8zQOQMju/7rdryTIB9xpgsm3yE/e1BADUn+w0S7Rqw34+Too9jcPPPu5VQEFdzBZnDpyJ1uLa307leuIca3LOctDaAoC8pf6zvMQ49D5Jh0k/Vy24mjI1RjZGNkbuj9kY2Rh5v2Jk4u28z/TMXdAaUzQ4EzS/K8gkKIMwJRf15W9KyMjpFi8DyZOBB18KpDH2f7SRSeEqo+i8eNQvj18VsMkBNYf047c8KW96m9AsAGbzaa6zs7M6ODiow8PDWpb9H2ZMOnDdcTzyLD34a0K9eqJjngCTTGrr9iAoqA19xP1nlrAkA/tTPp+fiw/5IGVxn+N8/EFI2WBtQcC+M/5cxymxeDXPibHAcfz3Stz3fJHh4ONbUyTvzIdEaUsL+dDnGS9s64uq5EOq2jqlBcFaHEiXaZGi8dzetG/if7aQYH/K6qScnCrTabHXdD1qjGyMbIxsjGyMvP8x0n2OdCPPcInRs7OzmAjIkDuvO4uPOUuw7gxsqx9+c6fyQJ4pPt1adMUrCXLv/GwbgMswk5F91JbzJvk53tnZ9kf6Tk9PL/2oHOVPfPhc5EF2PTk5OZc5VY1c1/49Jf70mtSq/NC1ZGNQO78u2yyYXdfSHwOYbRIQezWsqvYAQMd1zO8QcGHFOQggSrJaHHB8AvFVWz3of7Sd5HNe9J0/oujz8LsWSgKxmc/TX9JvVzCWlEu8cpwWD1y0JT7XYnC2qHLeUozquPNAnVEmByeXQX25+PTxyQf7ujzMd65j+m3KU9Rb4rfp7qkxsjHSde3fGyMbIxsj71+MvNGXZlTNH1B1BcyYY/Ki8jzR6L8SG6/MdcwNrl85XwMzzk+nSEl4dtWd5Js5ItvSoDPHZ3v+KB0rGC4XfxAxAWziw6udDs6eAH1cr4zNnJ2LEOrVK1w6zv8ER09MDtLiyf3PHzZPiYWV0AT+M8BhhXmMcQkIkq2oS/Er+7lN1EY/dEjduT09lhL4Emg4d5IxAbHo5ORkD1R8YeLkAEcdrcWf02zBQvlS/8SfjzWb131TCx5fkPh8szwpf5yB4UxGn0vtCMS+kEkL5Kray3ue168ClKZMjZGNkY2RjZGixsj7FyPX6J73hyTh0m04BX5ydDJdVecViarLwcXATkZKY/C8JwsmRfF4eHi4x5cHvTu5+qWH5pREnIdUIaSsnNdJ7Qi4BwcHdXR0dH5cMoi3WaJkRYnf6byeTPUjfEzS/Kz2nnBmtj46OtoDHgab65/9fcHhvOg79Z4WNEdHR+dbQdwn2J/bOtT28PBwz6+9okfbJzBOiY7jaQzNn8CQP4qY9sRfp9IqOWgzt533I6/0E49zjsHz7mO+3cJJ/uHk1cikX/aTrBw38cktA35XIiVXgoCPwZjiYjXN6+Ml4KfeeZz+kPRFnyYxb5BmgNN0fWqMbIxsjGyMbIxsjLyRO1ysLNCgs8TozpqcgYynpDob24OB7ejAbO8BytvbznMKtmXZ37M9A7qzs7NLe7zp/NIlndITqvPEZFdVdXR0tFdxYX8meQ8mJvAE5ilQ0+
1u3uZ2nc5AVL7DZHadQCPJB11/Gsv9k21pq4ODg/NtIeqXKh1M9qwmz4I/JRCXgb7qiwWe1zltXUkgOOPDiX61LEsdHR3t+ZgnfU/U9FuO6TGlOdwHKavaevxSJo2RfE/jpvikP6SH21mtlDy0P/UhXpJuqurc1zmW68I/u+7chpvN5lIljrriYkLVXp7zRU5aOLC67PGv9k2PjhojGyNFjZGNkY2RDyZGjnsF0THGcnR0tHfMk7WYuyqxqz2dj4mFglHZLrg7LcdIV92sxHEuT9weQJTLk70r/zqAqDZ+u3U2hnj1W/KuV0/Wftx1n5wwJTW1cb7o9AJPjcPbyUywCeSTziSn28mrZAQUyp1s50HrPuu28UqOL2h03PXj9mXF0Bdg/hpu+r2ePVDbVI1RH5fD9cCY8gUM9cTFhuuFC6kEPGs6IT+zBZkDQAIrzesJkWPIVlrUcT+97MIxxXMa19vQH30M+qLsTn345wTSaXFJ3/NFnIOHLyA5l3/nOPR3jXNycvLyZVne7dJgTZEaIxsjGyMbIxsjHwyMPD4+rmVZYmDe8wXXZrNZ+ApIGYoCURDNx6vfZKTk+Pqckpq+J6fe8XkpoBJweVJIjutJKH13SgHKcy5/1eU9196P1RsHJI3J5O0J25NEGj/pRkGXErg+pyqey1tVlxKkeKD9PMG4D6V+sjerQrSRV14YfDrvAenBSdlYXfHKCOdne/r+bOGhcU9PT88fzp35QrFcwAAAIABJREFUOPXr/HJcjxMnT8a+4HaZ/JjzmIBlLY5dFhLjNckknVVdJEL3pbRIZNJ00PSE7rJxMSffIe8JyJO9Eyinlxs4n7QP5XN5uVDxRYrz4nGk9oeHh3X79u2+4LoLaoxsjHT5GiMbIxsj70+MPDs7q7Ozs3jBdc9bCt1IDKR0tch2VL4E5cOrrnRPLlQmH46lMelM7iApIJl0fC88+5MXESsBrFTo/1UPCjr5bxl4e+o76XCMUXfu3LmUkD2R+L5nOo54pZPNKq9ebSXRKRPokQev6rm85MkTVQJsB8xUzfNnCLTPV+cFdgKZGRiokk3fZDv5wVp1l7Zh1ZOVJM7h31MyT/7GuJEMm81mrwKcKm20h/PvidTjajaW+0fyFc8b5JufxbfHnid09mEeYgxr24z6Ozh5MiYPCQg9/lk1TbpKOvaFD3Vwlb3V33NpWhhwXvfjputTY2RjJPs1RjZGsm1j5AXdDxi5Rjf2lkIxy0TsRpYg/E7HENMMfhdylli5/5kJSmO5ghncnENt/Xa1G1JtTk5OLgEYnU3tr/NWFncI6sfbeyKS3O6EDnzkgbK449J52N+3C9Buvlda+lTAuM2pE7XhvnAGqTs6eXcbUZ9MAk6e3JSYtT9dc4h3VkrTfKqmOtE3HKipE+rQE5Xz6mBHOcWz2uwqLpeqsUzc7k9rb+1y2ZmkBLo872+5SmOJxxnYVO3H+GzRQB17ldlv/Xt/8u6+Rh2k+Rk78iO+CY5yERRpA85Hf3Ce0mKI/s7XFDsYsSrNNg7a9KuZ3ZquT42RjZGNkY2RjZF13uZBxMgb2VJ469atveAgw0nR7qR3QzNnpMJTwqQBqCzy4s7n7XmlLYdJoMC+bnTNycRFPXkgJgDkvKw46NWn6kN9qE9KHtdxUrVz2yawmtmI8yS5rko41J9eqzrG2PvMJJz2OzuvM6DjYkM8OiDNgN6JOqnar1Bf1a9qP4nO5Ep2pW5dh0n/Oj5L1pRFvPjC4eDg4NI2nuQvtENKYuTLEy91I+Cj7zi/a7qlfCl/kFceSz7istF3ZnImUHU5Ej9V+3ngOnKy8ugLNX5WvpBunZ/j4+PeUngX1BjZGNkY2RjZGPlgYOTJyUmdPVZbCslECi4PJDHmCvKk5c7hyvLbdwwMJk6dS0qX8fiqztSW4ySnSm05PvvQWXgVX3Xx8KwDsMhfZzrGxQO2qYKV9E2eU2WSTkYeSUzAa07PcVIyITDpu5IS+6axCaK0Nys3yff49h7f+yselbC5XUY6SP7oyUTfUyVPPCT/p5zpNrrbzxOdbMg5tG1Aek2LA/VnBZXjc27Jxv9KPn47Xt9VQfQY8cq560J6XwPrWYymPp7UNT4r3z6WvtMvqAvK5X0TD/RdtvfqZcqZnje5qHBiW/quj51ALYFOym9Nd0eNkY2RjZGNkeKhMfL+xMg1urqEcAUty7JXjaBDONNUiDuKG5hCsGLGeUlUngzOqpv6pMSQEuO5gjb7+2Eph59notR4foXNgEuAISfx4Ep/m82mjo+Pz9uzykGdun6cP+pzpgsPtqSPpAcGCKsdnkAcxJJeuD1A43hlWO30lyo/OuZvrhKPm83m0u9QuLyepPXnIKCxBXqam0nck5+DBMcm/2zv4Ek7ut+TB313PyHftLtXrHjcgUE8pIqU+HY+SQ5Q9BtPfvQ/jamtTJqL8/pnjxXZl8fXqsHs6zZN9nIfl29zXvdbj8sUJ2wzAwDGJvXvepzFddPdUWNkY2RjZGNkY2TtjXO/YuQa3fMdLp9olpTd0b0S5UnBb53quAcGBdTxVClg22SoNNZVMqkNQcvlTMbwQEiJPNHstrN4YSJkEHPsma74neNTfrdNSij8zj4cixUQEu3rOkvOzMTnVcZkOw/+pOvN5mIrhoOy693HpY1YmZWOtbDgA7ezCozzlhZrOk6/SPuZZ7YUX/oBUMrLdsmWyZe1r99txr70d0/qjMuqy3ujqVvvk2TzhZ4nR+qKY3sCn/mR5KGvrOUQXzzxvMuk/nxbEsdNbUWMBcrmMnG8Wb7jfncH8abrUWNkYyTn5/fGyMbIxsgHByPv+Q6XC5eCNZ33q3ZWZmbJ1at6biglOk8q7tzuMKR0PFVamAw0ZgpAOklKnnwolrI7SCSQct5YLdG8awn+qnFd1zOgm4E/5137dW6fxx3XdU0ZWYHy/sleDGIHKCZ52i1Vt9xGXpkT7fbz7ukp6dT1kfzt7OxsbwvGDMD5p+Pkl/6/2Wwu+aDIFyDuKzNywKE9WVFzWT1G0gKCOqSM7n+erB2wGR9rC1f29/OyWXql9Kwyqv8pt7ntZv7iNpnlFgIXjzs/flx68b5ehWy6PjVGNkY2RjZGsl1j5P2JkWt0I89w+VW/kx9z56ZyZ8bj+Kk/z6craZEnHVYTXAYGq1fmNH5yssTz7DwrmUxSdCiXVXP6azg53qzylHQ5AznqLFUQrpJdx5JdvULi52fBIZu43dSGdqJsvlik/OTFA1569nOaT/r2cSSjPzQ+xsXrdPkaVreXy6AtSQxoT0i0mYOMy5304PYgAHtMpBiUn7ByRr9MydLHcP+gHeTXtHGytYj74pNvenyvLYjIZ0rIvqCiXfy/n5/FzZpPJP35wsTjIVVneZzgQZ2x2r6mn6Y5NUauy9oY2RjJeRsjGyPvR4y8sZdmeFIj064I0ew2o/p6wnOnS9UF/p8BEol90tWp3zanQpPi09gcy48T3FJQc6xlWfYSl4iVGI7jttCxWUXB5SAf3Frh87gsSXafN9ltVh1YSx5pQZEC2vWYjnuQjnH5dvVsA
cNkmvTnidX587a+7YCvMeV8M99LMjtf0i1fjUsdpphz3XmMMtn5XCm5Jp/3nCA5E8DMFh3pXJpzzcedb/q3A+xsITMDkhTvzosvYN1nEw8pxmfyp/mvE/9Nj44aIxsjGyMbIxsjH1yMvJEthZ4kZ45Lugoo2HcWDA5OKfnOePO+OpeSeQIu5439eOuTVYw1cKHTcNuDB+/a7V3XC//owLME5LKTh9lcHGsNxNPt6xm5zM6z7JHGSMkvybz23e2RQNBt7uDuOiFf9A/Rmj48MdDOrlfK7uMmsNcrl31BwnF0nHL7eeos3fonb7MFh+vJZaIdxc9sseCfxQe3Zfkcrrvk57Pcdp14dPnS+dRuxpv7aZKL9phtyUi2mvnnmp82zakxsjHS9eXUGNkYqT6NkfcnRt7IBRcFSIkqBZArY7YdQf89cD0R+FxpDJ/XeUwAwYTq4DdzMD/m1b8UMJybCcK3KZA/r6K5A+gtRCJVBGYOQd7c8ZLOU5C4gztIJptwzHR8dt6DR8fJawKlWUJz8FiWZe+H+WbVLc7rMiuB6XgCBvnsZrPZ+wHClCw4pxYW5NnHJj+ax99kxfazxUECdNer20j8XWUv6s7bpTmoN5InavdR6TWN7XpKgETeU0XXaZYfUvL22KJt05guX8pHKbekuPV+rByq/1VbSZqupsbIy3oQNUY2Rup4Y2Rj5P2KkTe2pVDEZJ6E8cQi5n0M9p+Rj89EmcbgbVfxQKMS2LQP1h/A9HlpSD/PQHLjOE9MNG48dx4e86DQeL4Ngw+7esA4r1X7D8ceHh7W8fFxdH4HxJnDOo/8Lv4oo49JG9Nu1ONsqwJloc7pK/Id7ilPCTQlMB6b8ZSC3o+Rl7Q1wG3m+nQ+OIf060kk2cITloNRsrXbjXZnv/ScQwLhxNPMpzhvqmbyRyaTXIlfb+fbJxwYZ8l9lstSLCd53DcSMM1ihvP6/B5XSX59Fl33x0ib1qkxsjGyMfIyT42RjZEu1xMRI9foni+4lJxZ7bmbW9keTDrPhyVnSucxJnCNQUrG9sBa49fHFzEQPEFxbvLKAGIbVol8bq+6zQw/c0TxmsCNbXnlrj+vVHg/Jgseo2wCZvkKeVyzFfXH/6pucF7NwYol+Zd8SffSTwrYpCfNR3KdzvzcAd/ndn9hxSnp6+zs7Pw3USRf4iMt8g4ODs7fijQDxeSLy3LxkDL1kR4edYBNsea8+mLTAWcGWglwlUtcZ1V1vu9/lgvcFt52Jk+ypVNK2Oy7RrMYd2CV/zNOOe8sFpLs8q+mu6PGSLyB7re+d519+CdhgKqllhpVdRYWVGP7pWpZahHPy1JF3NsOc9H3YujLOt71vfh/Mf9JWkQt2wGT7KfQzelmU4vkDnblWBdzDjC7nPdblqU2Y1OLpNrNfTp2+bvWc8OlxaCamxwu494YmnZ3/OEvelHVI69vjGyMbIy8AiPTXTfRjfwOl4iLdJ6fMUBhk/Oq/wyYZot29fHfPJiBRpLBz7nzMjlwge/BvwaaKTHMQI+Oq8+84PALGXd6B1kHR680qY0nzBTEGoe25AURfcAD0p3U7a3PXnF1WyUf4DYX6lby+u9xOL+sonEMD0j/LL2dnJxcsi2JCybXJ9v4PAIPf+OS65G8qA3bHR0d7enTwcaTEhMqq4tqR336toSU0NO2JvqKbML4os+73t3P2KeqLlVxPSbdf5jP6K/Ok7chb2mBMosr54N5YLYg8Hb8nhb1DtKsbCb5Z/mi6frUGHmBkafP+LW1vOdzL/M6+T47fp2+s/aJrjPPWp/1uvb15iQ9mvEeS1oOjxojqzGyMfJqjFyjG9lS6MpiQCZF06ET8LhiNKYoGX/tezruitdnfU+33xVUCmQHM/6ReJ7bcjyAXI8eHOSTQaV+6e6O28WPc0yep+4IntSLj+cOTz6kO73pJ10QUSfe32WYBSl51vi8hU7/mlXf1C/ZirbxZOTH3cf99bd+Aa253QYcVz7kF61s5/ZIfMuHBSqUw38Pxu/iineCkCcu502yu850TEnNwZOx5kDuslOnzoPznvxH/PB/0p/LkqpaPg7nlk+x+pkWJdQbK3AOmJ6vPHd6fqJ/q/3MLimmmu6eGiN3PnhtjTU9HqkxsjGyMXIdI9Ma9lz30zN3QekCIwUFhdSbX+iU7M+LCZ+HiiG5wjmf78/lOAx48eM/dMcgTr/urT9WKfxiYKaLqvx7CO6gfvElPauaw/F4cUFndH5EfvEhR08XRbSN21D6ZCWMSUJ65S16teWDqgw4Vovcdu4/vLiin/i4TE7J/w4PDy/duVQioO24TYj2IA9qKx1Qp7T1jGdRqj6Jz8PDw0t91gDQdU5deELmuPrxSm6ZoG/J3vpMPph8PSmKH46tvgTbJBMT9Awcqi58j8WKxKfHPW1M/TrIEuzZT/x7gcIXF65vT+BuA8a1/G4mg8+h/+nHdGeA4QucputTYyT883v/ftWH/4aqf/x/ZWX92ffenr/9xlWdPqb0LV++5eG7Xvam42GNvu1vb/n78N9Q9S++6eL4X3r2xfFH3rA+xn/6wYu2L/vc7bGv/MyLY//lh86betQ3RjZG+viNkVff5brRtxT6ot3Bwq/CGXC6YOA4dG4XigZ0Y/MiJilAfXlBM3NSN4SClm28suj80WD+yk0aknuMUzB7oiDPvGvEtkw4aifdMkBdzlSR8PPuxG4TJRXak8lLnxlk9Avpa7PZnPuH5Et346gL8iE+XY9eZebYx8fHlxY1tBUTYQIdt53L5YsYjctxNL90oItU6UP8eLXt6OgoLsa4fSMlYf75RTT9hWPLhtQB500JNyV8T4ySm7oQH4wT9U9yuF49T7APbZ8AI9nHc5jbzgHbdcrPBCPyk3Tl/u0gI/LFj+uR+kj68Xi5Ckya1qkxckdvfH3Vz/9k1etfkxX1ed9c9WU/UHXr4Xz+V4N+5bVbHt/wujcdDzP6p19X9X98bNX7fmjV73y/qs97YdUP/ZOqlz53+/+zvqHqV3656sPevurkOI/xEz+yvTh7299c9fyXVH3N51X9gy+ueu2rqj7sE6ue9Rurjh+p+ovvU/ULP11n8IfGyMbIxshHh5E3csG1doHB4PEKGits6eqdRpIiqMxluageXBJss/+M0/Hx8Tk/MwWRV1+M6zivzEVsr/+qqPi4M/25Hjkuj1HHLuuyXH6ejDr0YPAkwfEYgCTdiZvp3Z1TfXQBRqBX2xSwXIDwos4vHMmz8+568wUEK20eOMn+TLTi3/f2SqeqTtNWnC9dfBIUyIvmZmLQsxdjjHPgUzvKlJLR0dFRHR4e7t1lpN/OkimTjQDLF3Ocx+/I8q6n5wP3UfoBY58+QZ2mQoOPS13Ih3Un1yujlNl1S1I+4MKVBQ7PWeKNd4Eph/oQVFLcimZ3JTxuvSjEvJsKJuTdbdl099QYGS7Ovuozq5771Kp//o3b7x/3ftvvH/4OVR/xjlV3Hql63tO3x/T3ke9U9TP/df/Yc59a9bc/9WLcz/+jF8d/6j9X/dHfst/2g5520fbVr7w4/tI/tD32XS+7GO/L
XrI99/Lv2X5/8XtctH/kDVXPe8vL/P3sj198/9wXVH3312w/f90Xbcf4ghft9/mFn5o7TqI7j1Q98vqqh9+s6qEnVb3htVXHd6pe95qqs9Oqt3hG1dhU/fIvzcc4Pdle8B7dqnryU7d3E3VH7MteUvVvv6/q4z+w6mP+StVb/brGyMbIxsgbwMgbvcMl4lW4vrtTuALUTv95dZ6CSk7JKgbnppN7hU2O5InDk4nm9qtgnhdPfrtS8/jFxMnJyfmxhx9++NIdL7XTfz5kqTYpgOnQShgMYurc+fWxZ7ZNCcGDj04qW0lfXCQwmI6Oji7Nz8SdEl2qrLLaI97cfqxwiEfpyqtSHkBpgSRA8WTgFTX3wwRMbgvOlcbyBJq2s6Yke3JysvcwcrpAJeBRV7Kd/Jg+7UBGG0l2jxHqIy3K3HddJwksOOaMdM6rjhrTfWuNZ8WF+urObtqKwnnXqn+qUFI+2kDtCEg87nzPwDUtoDkG+fc82nR31Bh5VmeMyRd+WtX7fWjVZz2/6l9/d9UXfWfVP/6lqjd7atUbX1e1LFXf+Mrt55M7VX/vv1b9zI9VffIHVX3xd2+P/673r/qYL6r6mv+t6u99QdVf/XPbC6ZP/9qqt3/nqhe9S9Xnf2vVweG2/Tf+/Pbuz3Ofsv3//Letevpbb8f7wW+v+sz/peoDXlD10Z+55fHFX7jl6Xe+X9Wf/z1V//lfVX3Fv6k6PKx63tOqvv6/bcfV38/82PZO0xd/z/b77TdWnR5vPx/frvprf6HqO79q+/3zv3U79jPfdjvX855e9YG3tnKv0R94YdWf/sLtBSu3Zf7Vf1b19r+16iPeaXun6ptevZX7bujPfknVt/xy1bt9YNUXfHvVb/7dVTuMboxsjOSYjZEZI9foRtBzdleBDKWrZyrbX5XpV4660nYlcE4qTN9p5HRlWpWrAZqLe0Epo4ImvaSChneHoEFVdUmGTIHHgNWtb12Ukf+qi19HF4lXXgDpO8c9PT2t4+Pj8/MaT/1YrUrBL55cJ7zwnC08mJC98uMXmAoizuX+weBNiVrjHR8f7+mai410QUkfcR51nElQvi5/cL0kH/ULdcpRta0QKZFTZs6d/FuxpnkUg0nfIgKKdJ5ebSs/ISirrfyR8/HZPl+cpUTHOagLt5Pa+xgO2uQvxZrr3kGZ84gvzxGpOkbZZQsWJNxmlDuBgfNOm1BXXol2OVzPzIO+KGm6O2qMPGcESjmo2my2d2WWperwaHvHhXR4dPnz6cnFhcRmbD8vZ9txNNbBUdUYu7ZHdf4QksbXVrvT4227g8Ntv9OTLV/n4x9u5x1jd0HzW6pe+JuqXvfft2McHm1l+I43bO8Uff3PbMcg36Q//6VV7/v87eePfZ+qZz9c9cpXbOf+x6+q+s5HtnOt0dhUPf/jq777TtX/9OKL4x/7+6te8R+rXvafq576llUf/IwtL3dDm4OqL3xR1Q9+R9XHvW/V+x9V/fxPNkY2RjZGXhMj1+jGthRq8nSF6At/Koy3K6l8BiUD04kKpwMw2MWXAnCz2Zx/TlemXNTPeGCQqE+SnX29CsixOZbm9rtCmpPB4HOzIuDOo/n8KpyO6HPOEltq59scObYHPG3DxOQLA6+sM/ClL+mQd66oF/qnKlf0C5dt5muUnce8CsM/JVFeIOs/bRjf6GX2pf50sc6kRH/VOKwCyT5+8eoynJ6e1p07d871zLus9FG3Lf2OdlV1dFmWOj4+3tOFA4f+z7YbeFtfWBLsE5FnEfWhvp6z+D2Bj8vi8zAx+4KXtnXZmD8SceFNubnIYFsnFjyc91RdnOm1aZ0aI3d91tYjpyf7zxydHl8+X7W7GBpSbNXp6fYiZLPZXjDoQmtZLt/h0fi6IDq/0DrdjrnZYdjYbP/OTqs++0OrPuBoe/Hxih+t+rv/peopT98fd/cbWecXS2M31vnYVXV2tv38v3719mLpnd99e/4j36nqta/ev2Bco+96WdUHHlV99eds20pm8X62m0/fl2Ur997zXMaf9PfXP67qvf5Q1Xs+p+pLv3d7gQlqjGyMbIxcx8g1upEfPnanStU8GcqdhQ7KLXG8kuQCekabzSbe1fH5fTwGgc8xe6BY8zm4eLJxXmb6Y1WBgMzxrhpL8vmFDqs8PiZ1445NuTkmbeLt6cyedJ1Xf4aBidNBfoxx/nyB8yReOE665U57iTcdc1uqD+8IpsBkAiJfBwcHl/Z+n5yc7L34I+lkNi5jibHlNkhjUnbXh+uP4/Di2auBKR7JrydLl8Pl9WPil/lCPuO8ur8nXTnfvMvLmEv8MLEmffP8Grl9vQ99xv3JY5K+Kp7SAoTk/p1yHsdIxYjrAErTZWqMtG1OB4dVDz95e9Fz+ND28+ag6uM/oOo//attm4eeXPU/P6vqW395+/349vYlEL/2Haq+6kcv3qD3/31z1b/6rqoXfFLVC166PfYrr91uDayq+r9/uOqt3/aCqQ9+q6onPaXq2167/f4PfrrqBb+x6iXvW/Uez676rH+4Pf78l2yfjfryl26v7b7g26v+7l/ebil80W+/4LFq+yzVqK0cY2yfq3rH31H1Jd9b9Yl/cLtd8uEnV33tF2z/SA8/eSvPWzyj6kOeWXX7DVXf9vqJBXf07I/abof8ipdWLVX1aX+v6j3+4PbvT7971R9/1y0/3/yarY7PzrbbH5/0lO2Wyqqqd3zX7R27T/gDVf/mn1Z92CdUfeSnXMzxfV9f9YnP2Y4jP7NFdWNkY2Rj5N1h5Jgly+vSZrNZ/DcKZgv3Xfs9hSVnooLYR20cAHyclOSpaFbikkN6WxKNJT7dGZPSZ3rmBRdv0VNPqVokvfCWvcY6ODio4+Pj8wDk1sWZvsj7smwrNrdu3Tq3YeJf8yV7ug04ti52Tk9P9xYQGkMPVHJ8Bpp0pYvJNL7zwkRwcHBw6fkEVm14S1/B7Hu/fcFDW0m22aKIPNIeKVk7CBBsPLn4nvi1BJsq5u4bjAHKk/wyVbdnsUpfoz7djqmAwDFddw5esrNsTL0ILPngtvOdyGM7AYvHr7dNADHTkRNl8/6UjRVXLYycT+qKMcNY8Lsay7LUnTt3Xr4sy7tFBTVdosZI/P+9f6TOPv3vX195v/yqqg95q+1WwH/4c9utcqenVf/+n1W95P23d2M+/euqnvTm8zFe99+rXvAO25dEfOPPVz3trS/uRDVdi57ywneo5TW/2BgJXvi/MXKfb//+IGHk7s5nTDA38sPHnEzkRkkgkhbnVfuVvQRSnG/N+HSutCD3edOdDndmju3zJNDT+QRw7lRyBD/PPpSH7ZOjePWCSccd3UnJJl2sSYfaLuJ8Jid03eliS1UttdFY7tz6LN5TpVdtkz8pcUpuyXB2dnbOQ0r21B/5SJVl8aSLXQYzberVNE+i7kdqc+vWrb0tQCIuaqh3feYeaOlecrFKyVhy36W+1U5bnVxntLt07L7ofu/ATJ34+O4THI9yczHAhULaWy9/9iQ6W1Q6eZLX+BojASfj323n+uE4s4U6/dR1xDlp++SjfFjZF83pAeq
m69MDj5Fj1F1tSn3xu13cofqLv6/q7/zI9q1+n//R2+M//u+rvuqzql78RfMxPuk5VU9+yvbvxe9e9XWvuBsOmqrq+C3euo4eelKdnZzUqFG12dTB4UEtp2d1NrY32pazpbaf6vxu2LJUndZSy1I1NsonSx2fndZmtw1zbEadLrWfFzfy69reZVvk2ztf+uVfOv+dtsbIxsjHE0au0Y3d4UrExa9fOEhIvzA5Ozvb2zuu4/7dF/F+2xD8XQIcd7xUSfQ5OfbMmD62GypdtPjCmufIRxpPi3e2W5alHn744b07X7oAoD54dT/j4ejoaK+6oT6zizy3KZ2PD5WmJCPeHPCdP22t4a/A62KKgc+EIGLAuh5TUvILWwZUWpCkZEx9cuuodOn+xwUQ7XV2tv21e+5jn/HofuK8eGKn7dPCjDISiGh750EvXuEYnuzpgxyD+lA+cH9d45M8+Vy0hSdYXnBTV04pN1Vd+LjLlfRftQ9QKXcRZJKPcWy/0+y8u6yUOeUsju25tu9w3R01RoKH3/OH6/Ru7nA1NQV6s8/90Bo/8G1V1RjZGPn4wsjdC2YeuztcvMIj83RsMsgrRS7mpQC18d87cOE8qScH4yKcC3mBSDJOAi0PYL9Q8MBMDqQ5PdDl3CQFUXI4jUcHVPWBF0bUM+X0oKGt1IcXMR6ctCFtO7PT2t0uHvO53AZjjEuVpvRqWV7YkFgVdp26HBqHeku3ud3/KDO3IaQLTgYz9Zsq3FUXv5PjPPvFKX0mgYbbOtnE+8wqN/Q7b+d6pe7It/TCpEkbaJ6rEh7tpHlUYaSc7ruzPFBVe9tek00om/rNKv4+/lXxoz6+IKa+3c+cr3TM5Z/dcZ39b7p7aoy0O2Ov+tkar/yp2t4RGfh/Lgm+7+6QXPK9pWqM85sq16XtMBhb0+H83rFx8Vl3Xi61n/FwMQ3m3h2yuzfn/Oy+L7XsnwMfF3OPSzJIvj07j/3uY/dtyrfLsOzrbVmWvTHHJRvKV/Z5ujhnPjtpSFzdAAAgAElEQVQ0xs6nNm6ELZ299dvX8pZvs21TjZGNkY8/jFyjG3lpxoyoVAcQ/uddCh3323rpKprz8EJBY/pVcrqNOuN/tsjQWOkWMGXixZSIt2nFj45xTgfJdJ7zcS6/qEiOQL55zh+ATrqZVQeS7H6cNiLPnoiYFBy0tf3Qg9tlUoDRh8SbB5MvGmZJgPpy3mfVECYXfect++Rb1IeSGS/E3LfSRXxK4rqIV5LWnU/GGLd4Und+ke72Sknbn7NT/5ToaD+OI56oPy5eE6jwu/jgnL4VQnN6ccPt4+R+sLbdw/uJD/eX1N/5Snd/XY8+H+1Mvtw3XU9+kdB099QYiZy6a7b53q+rgy9/6d54M3/m6+c9V/tuC+osjSs9Clv8NfqeUzzfaNdHyuuJ/8QLxxbRJpSLdnDckww8no49Gox0fVBennMduz8ljHQbCjvYV3dsfJ43/onPr+Pnfcz2Aq0xsjHyCYaRN3KHi8bQZ09yzhSBhN8lqJyTgZKSofp4QtDYTBhUcqo2etJmAHj1IPXxJOwBR6eb3cXwhOhz6rOSjDsUZRagODC7fLKXfmuDFRN/FauOM/BdDtprs9n/hfIEmnRsPW+lcXQH6+Dg4Pz1rbrwEh/ubwJEvQJez5qdnZ3V8fFxfAU//TElErdf+sy+OpbukqVKSwJq3eF0nhgbBGbniYsrxgSrgBqfF4Aa1++UpoWIfyafDr4p0ZPScQKZxkzx7P3dDh6/7tMpztwePpcvsmaAlkCB43guSosszzMevw7WMwBnrPncvvjwxdF1QaXpMjVGaszd5+VijMbIxshrY+T5dftojIRtGiOfGBh5zxdcdCIX0tvNnKsqK6GqLj2fk8hBIyUG8kEe0tWukys1nedYakuemFzJ9+wCjeO6njQXAZxVEh1jpTHxnOSRE/t8lInPIxB4bt26dX5MbXVediTffozJguMzIboO3acODg72ftOC+jg93f6o8507d86B5uTkZO/3GKjT2SIlJUW/e6l2qUJDGzp5UnFdONgT8D2BOrBQRvHLH1tUdW+NJ313OXnMF4Yua1q4pTFSpWmWWDmv5yFfFCQw93auT+eJfT3uPbY8sc/mpJwpNgnsnFvAT7/1POf6WeNJn7kAEU8plzStU2Mkct7YvjRDozVGNkbeLUbyfGNkY6Tz+abGyLVceWPPcKWXMJBJCpgU7hUCPtOUgpfHfV4mlsQDFU4n8GOszLgD+/yedMnHLGmw/VpwpsTmfK6BUiKXVUHCxK15dUwVM722XbfV9d35EMgRSCknz4knl0P6S/qXjsWj5E7BSJuokndycnJesTw5Oak7d+7s/TiiAIjJl0Agn3V/0Xm1Z1vftuk+SNJxVui4AJn5DROOLywYSwRxzc+3KtGPkk+lxOoxOkv2nvQ8uaVkuAbm5CGdo52Szl3/vuhL8nhcJ/Dx+T0HzeZOcruP6Rgr7DMQ1pwpL6zlN5eh6dFRY+TuvOWCxsjGyLvDyJ18kLMxsjHyiYKRN/ZaeCZeMcLnk0gpWfK5FgXMDDxECZA4/sHBwflryxPAcQw38gxEnK+U9JND08gcz8dIQOug6WNTdiYxtfe3z8z0pf5MYA4Ut27dqsPDw/M/gYnriYn98PCw7ty5c4k/B2yCgN8Wp40cxNk2ASvbeyVC1auzs7PzX3gXyKjCp3Npvz/BI1VxXB8Ebga4y5p8gHHBqiV1QJnJK23r/k2wo4+s+br7iyfHtPCZ2YTHZ8fS/JTRfY/tkz59PtdVauc68aTv89DOnpjZ9io+k1zUc4rjqot9+ZSLdrvqzpXrx3XRdHfUGFlVs+PVGNkYeTVGlvisy77dGNkY+XjHyBv9Ha6q9a0FzpAHFRONqgguiPdxx6VR/PcY2IZtk1OynfefOfgsYFhxmQHkLBGRF5c/ASLnYwLzhOp61W1XndOWAx3XVoiHHnrofGyv/ngSp8x6CPahhx66VEmS/EzIWgykxOYPmXoQidIbvBxs+Btg3ELggKJtFmdnZ+fbLVLyEJ/UMYGAvKTPkieBPs97P/JASvGYQE59WQWVDanrGbAneTy+3O7ed1Ytn4FdWmy4PpJ+1/IJ+VjzL8oyi+kETLNcscYrz/u8XkF0HTNu0gLH7ZYW3lcBXdPdUWNknb+jrjGyMfLRYiSpMbIxcjbvrzZGrtGNXHAx2bqSZobU+RmA06BMWP57SJo/XcHy6jWNq/akZFhWRmaAqM9s6/zMgsS3hjhPybj672DMoNX4Mx37PmdW6Vidu3Xr1t6ec4IqjzEZ+Jzci84Hfse4eLg5gZGDoFfJpIdbt26d+4EnB9etv9FIc2g7SFVdephY2ymOj4/39rhru0VK0OQjJWDNzbePcQzZkqCaApx68+qok/ucgJkPCev/ycnJJd8Ur56gfHGSEtWsvRPBuOryG5Sq8gP9aRzyPvMJj2l9d1m90pz6Ub60eOD/5IP874sVz1u+gE56mOU5P+75zH1wpr+m61Fj5O5z8J/GyIu5GiMbIxsjn9gYuUY38lp4OnRyFAqbhPbk4J
Untvf/TGQyDAHIleAOLxLPKUE7rSUql9mDyGVWvwTIAs8UvOJtrbri7akvgsAY4zyRKnkcHR3VrVu3aoz8ALASC+1F3dPWnkhdp9zrzotW9Vd7ysvjnJd+4GBEXryKd3a2/2OinOfs7GyvmidguX379jmoaG87E5F44K/Yc3uD+5vkSD7FxRR/j4eypQWVEgH7aV7akjpWP+ro+Pj4kh9xESc5Z77ni0DO43HoFWrqiX1myY26cpCmvjgm53agTvP7eB7HKYkn2/I8fwzbc4ODofPtfi4drwENSTKmvOaLvKa7o8bIi2O+MGqMbIy8W4yc+VRjZGPk4wEj12j1gmuM8V5V9RPLsrxypc05My78GuOzY+qrfdFy2LWrZSnZr/ipTCYJ7klOTulgwCtaVshYVVwzmualo3kfjkOj+u90uVOLF972r9oHaNdTAhIBh954pAqeJ6xZgmbSoK38dbdq72DtAOI8Mhl7BUoy6e1B/qN3rgMmXc2vh5PFr4P00dFRPfzww+eVu6Ojo/PfZLl9+3YdHBycV/b0RifazG9zkyduTXD/YEzQfhyXlVX3Ez8mku8sy7byuSzL+XYQB1m29yQoXdFPGYe++GMbLhrpT+TBY5Tj+ALKaZYcuRhhbkn5Rd9nwEke3Z5J7rTYSzJS3zNZCZSzNi6b+vEh85RbZ3ytAdKDSI2Rd4eRazpqjGyMTHaexUtjZGPk4xEj12j1gmtZlu9f7b0jMUMF0+FpaCpAe5bpiDIwf98gCaLxfU7+d2VX1d7rQWcO6AoUqKXbtt6Xi2Y6lR5MFl8cx52WDs+ExwRAHegckwDB1PXmoKGq3dHR0flnJlWN7WDgAMA5uBhIAVxVe9sW6Cd87epMF+JVPxwoPep4Cg7p11/ryt8xUaApQXuSlJ6Oj4/roYceqtu3b59XOh955JF65JFHzh8eVsXLFzySgdUafZccCRh4zv0nLbroU/Jj91F/hoN605uq0j5/T06kxDNtz6TrCVj8JXlTPHsCdbBJAOz+yrji4sVlZdwm8PcFDnnhomwGjL4wJb8OIjN/cPD2XEh/I9+zfOhjXQUoDxo1Rt49RpIaIxsj7wYjz+dZzuqwMfKSvI2Rb3qMXKMb++Hjqsu3zRlEYsaVTedPSdXHqNq/omdgkLwPxyGfLoPGpOPqnCpkybGTk3HOGS8pEdMpCWLUrUDA37KiihITORO/Erg+37p16/zhXyVi2nJZLvZ+a34fi8S91EpunkTUh1VCje8VOR+T/Tk3/cirQjxH4KD/8bv+BIriRwlb9tEvzrOa9/DDD9ft27frkUceqaOjo7p9+3ZM8s6/xw31yURP/0g28TmS32kBJ3n1fIAWINxawfFlUx3ntg0mPn33yhHtQR24nWhr+n2KJ1aHya9/Tj7iAEZ9UY4EIp7X5LvelvZMIKK48wWV95WMXnVz4ONihbaj3Ti2g1xa9FDnHu9N16PGyP2FFL83RjZG3g1GVlVtxsXxxsjGSPV5PGDkGt3Y73ClqhMdjOAi8qvGteTP8xyX39XPiRVBGoPt6cgECgdHtZkZ3oGRnx0UWMWhjC57AmYPau7/5X8lPbXdbDZ1dHR0XqHig7+pGqexmCBSkDuvVbUHDPxOoFR/tlUy97YelFUXFWDy5fMxcaTFjSdlBxbaXa/Ale7u3LlzDsq3bt06r+YdHBzUnTt3arPZnD9I7P4kHjUu/ZWval4DCZeHccjEqTl8EabqJ/3KK+JcQBHQnBe3qdqTZpUjfSeIub3Yh7InXTBG3Z+XZYmvwr5URQ38p/aUg3MxFlJ1VMQ7FSk/uLxa5Ig/+gmfsVDflKdSnlxbiKS81nR9aozcybe5vBgj342RjZGrGCm+dhdBjZGNka7TNzVGrtGNvhbegUPH6IA8l6qlrFypHStUDATe5lRQerCqn4/rRvUgpCPzSjY5gwNTAkCeT9UOtvGKhsZxp3Kd67scTbwdHR3t/R4Iv+tP4EI+uH1A46nSl3RctV/hVGVIx6Vjr76pWqRz1A8TonjyiuUM3NbaMIn7cQcTykpdMAlwO4y2T9y+ffv8oeE3vOEN5/vXZ8k38UcdkOSTs/6zxYeqkQSYk5OTeuihh84BzytbXslzfiS7J2ePlSSH80iZjo+Pzytb3lbEWGPi5HyJlzWATrym5Evy6qwDvM9N+6QEz8q8t2MuoA8LSNynU/U3gTXBibxzYXodUGma04OOkfrF2sbIxshHg5FVFz7VGNkY+XjEyDW6sQsuVo+YgB04eNXvFT+1kyJ521bCiWZJ2s/LCJ7Ak3N45SDNS4cgzy6by6RjdC6vzqSg80qiA4eIlSrKKyfQw726rS89M1hZlaKuND4dlYlWFS+CiD77W400/hijjo+P99qxr+SlvTxpaE7qkHK7PxAgdU7VPx+PoOg2ZsJ76KGHzqt0m83mfDxtk1B1tGqbdLWHnbZye1Pv9KHj4+NzO3qCdD/hednVH2Kl3wgQmDCoR68AM1a4ePHtC5zbY3a2WJSNHnrooagLxonHgx8b42JbB23PZO3ySt+uR+c9ATnn5VjelvPRdxmLnE+8c3GlMUS+3WWWwyivFuIum8d/071TY2TVyWTB1hjZGFl1DYzcudTZ6Wn50rYxsvb6kr/GyDrX45sSI2/0Dhev9KSYFOBilMeobDku99CSZoDgVYbkhHIiT/A678b2cdxZSUm25Oy80p857MyBPXEwOKouAmaMcV6l0z5wJXI5D986RBm575i8+FueqFPaW39eKeJt+ao6f5Uu5fIqkNtEMtJmrDZ4UiKf8k+2l1w8Rh7cPu4r6s8Hk/mKYL6NSW9qOjg4qEceeWQv4LUAYwWKsgiYKIt0y+qMJx2NJbBQbKm9/JFxRz1zXPmF70tPfFB/lEmfU6VNiZ8AR/9kfHtMOfnikbwy1zgfIr/w9UULF8z6rHYEST8mkjx+XHok4ElO9uX81D1Bm7mY9nAgdF+ZLbpnum66Pj3wGDn+f/beZDe2JDvXXNv7nnTytKHMrERONRI0FG4N70PcR9RYo5oUUC9QgxIgCYKABCLjxInTkfS+9ztwfsv/vWjuQUZQlcxMM4Ag6b63Nav5f9vLltkubBeuyxyZOfKxHKlF284cmTnypXDkufJsD1wqUO28KjsCROp7VX7qtCQVRjQSrkkRhi4RRiI5JTytm+sjsem4tA+qmJSx6nc4sxpfKtqp39GmRk2JFOGQROkgEH50nFEWyIPrtG8aGVQZcW0kJa1fjVn1SHvYjPZLo4ARQKPMdUwAJ86qpBXb0bZP6ZoIp9avhAVZs7xvdojqNRoNm8/nfqQwkb5qtWrNZtMWi4XN5/PSOFSGUScRrKNNRPBSoNe/q9VqKV+esSmpKhFotDT6Svxf+xonTtF+ol8qOFPq9XoJBxQ4qZO2NPrK2CL26D1m5f0PmgoT/ZiidgC5673aprYbcYu2te/avxTAn8KeOMGN96jtqG7pj8pI9aj7Ps71IZfHlcyRx6L1ZI7MHPlYjiQlNTVBzhyZOfIlcOS58mwPXDGCEqP1OkjtuN6jqRHUqXUo8SAkB
Tnth7YTDUoBJNW3c+CvbWvfo7GqkgE3rRfHicuiKeLV9jX3VMFRDQHwqlSOm3+pW4kqRUz0n35zr0aOdNyAGICq7UQi4HsATcet+kjJV3WppKXRIO5vNBo+HtULfaReBTy1Cz0SF5lrBAkCgVT2+2N6xW63K719vtPp2Hq99rYZ22q18nvjQ5fqX+0X0FIbYlwKZOpbmt7C+KLN7XaHTcCc3sXnqUif/n8uwqXXqHyRJ/0+ZfOq31hnJBX+VhlEf65UKqU9BJGg4+RN+6B9i2SnEWv1e9Wl1hH1G7FC66UP6DxGZjW6egqz4gQsYqjKQvFQ/TXV71yeXjJH7m1fPLw+c2TmyKdwpI4z/p05MnPkn5sjz5VnPaXQ7OESrF6jhsO1p0hdcy3VwPV3dDr6oSDG5zHXPQo2ZTjxu+igKYNUw4jRiugQfKZOl1JsarwKKgAVoAawaWSi0WjYdrtNOjJ9jX3RdAe9VpdplRyQA1GgSChaT8qgta3UZyl7isQYo096bZR/JKH9fl+K9ClRUFQvMZoFqPEOklQf+F7rgoAUOKPtRZ9SO4nEo31X2fF/zBVPTSiiz50C/EhgKluNrMUSsUDbSvU/2kv0+5RvxXIKuLkvtRdG+6HjVr9XXOB/1WVsS20hhX2xr+p3KVlHTI26ibLVjey0Fck5+locay5PK5kjHx6aYfYwop85MnPkOY70sZzQfebIzJF/bo48V571PVwU7ZgOKC7ZUVInvKgwIyhHgcZlypQgYhupfkfF6jWqJL0Pp1GAjH085ZD6fVwu1Xqiw6phE0niZKRms1kCSdom8qNP/Pt9OS0igkqUgxKCghT90QheiiSoX+uO4z43CdHvmGRobngKRHRMKaJSIo591nFwTeyP6pwIEk6/2WxcL0ocjBMCJgqoESicHsKKwI2P6TX0R3WvAKR16ffoN0bcIZ5zUXhkGJfd6Q916D2pSNKpCZy2GScKSlanCELtP/p3JProa3G8sY/6XZxgqg9riVFsla3io441BeSpvkR8iHJRXetESPufwrFT5JzL40vmyHtsCjLJHJk58ikc6bK+t+fMkceSObJcXiJHPuseLhWsDiY6pBpMFNQp0Oa7WKJhxihU6n69Th1Or1GH0s8AcByPa7ScM8j4fySc1FiJZOqpQLr8Xq0eT01S0IukEx1aZRVBCEMn2hSJg/8jyCixMA7dCKykoi9M1P5Eh+R/JR6z48bbVHRN5RoJKtapbUWd6pj4XEF5v98/2Liu0U++L4rCI6vImogdkVVOpFKdR19RIlE9MOZo84C5Tug0LcbsmG+vciyKokRySoIp0kiVVOQuToxSJQWAcXxqf6cmgOd8Su9JAXPsc8Sq1ATvMcAbI77IFb0ouZzDwlPEeOo6jSaaPTymOPabPilhP5ZUckmXzJFmFu7NHJk58ikcqSVzZObIvzSOfNZTCmOJYBNBWYuCjRohA4q5mNEgFZCiQ0WHiwpQZ9DrUtfqdTHFIfZFxxXv5bvdbuckoaQYDVRBng2+RXE8ZUmdK+ZCn3MSDFijeNTDZswI4tTHPdyn90a54kTURdtqE5HwtC6+5++45KvtxchUJBjkHm1Tx2H2MNqhdWvR66NuNYJHPZvN5oGe0KtOUuJkJzq06hyZ6NjVJ7RP5KHTlqbVoJN4DG3KZ1UX0TawvRQYxWvi+KJfqd5SoKnXpEBWVw317yjjVN1KanqP2q2Oh6Lkq3aktqa/qUsjoZEwU22fskWVZYzaUn9c9VCdqH89hkhy+WUlc2TmSErmyJ/nSH1xdiogkDkyc+RL5shn28NldhSggmnsrAoiEocWVRCDUtBTA9YJqtaf+u4UuUWDpO2UkUWDjP2PBnCu6Ds4ThmsEokeYwsYsQE2gkv8UbkrcCpRm5lvDo394xruVTJRIotFbYH/1dFKgBr6q9EOta0IdCqzVOQtBTZ6rxIP7bGn4RyBxML1Gp3DbvU9HvSt0Wj4tavVKum8OiFQANBImtpOKgKp+tUNy5Fw8DPqi9G6KAOdAKUmcyng5v+YDx79TttQ32eDtRKRykr1rd+p30bfT5HYOQBV/ErpiTpSxJCaaOpkIDWOFJ5pmynMUZLWPuq4Yv/0+tiH2OdcHl8yR2aOzBxpLrtfypGxr5TMkZkjXwpHnivPtsKljcUn7ygQiipcjTkSUswtTwG8GnaMpmgfzvX7VEkJW40iVYcaWXySV6VTXypiwbggDQikVqv5MawKdJoyQb/UyWmHeiN4aYQqphGorHFqlYuCpJbUpCKmUmhbSkoRUCMoRXCgDo34poAmRg21n5VKxaNrCqjx2tgX1Tn9gYyIyMQXSOrLEFN2FCNges2pyUzK3uP3tM+xt/pdBEedzGn9kZi1bv0/ji1OJvX+2HaMPKlPpPpyqigh6mQnhU/RLrTPqodURBW705LSTewb90bdnZoAUe85/cexRxKLvq0l6g/9R3/K5Wklc6SZWk/mSPO6tC/cT32ZI4UjrVwyR2aOfGkcGVNftTzbCpd2OEXKUUkKoPyvYEpJPa3GOtW5o4JPAdLP9U/Hpn2ITpAywFN9YIxxDCljUYAmHQLgrdfrpY2mGmEDxFS2+pt7TkXAFDy0Ph1H7KPWic5UVipfrUtJg/tjpEwBKjpJbC/WEZ07OqpGfvhcdR3vTfUx1VeVeayLFBRyxvVYYa7T8cRIXGqJG7nqeKIc1UZVLtFmK5XyBt4oH60zrsZF0FVb1wlAlKlOnlIgqu2nrtGiwJ4aQywRiE/JWH1Cxx/1HH+firZR4oRO/04RU6qeSK6pSeCpsaucU0TGddhXLr+sZI6871NxrCtzZOZI/fwxHIkB6VgyR2aO1L7+uTkyVQ/l2U4p1A6fazAaRzSi+F3KaFIGHgHc7KGD0U4qUnaOrFL9PKXUlMBjH2Pd3Kt1E7FjEyk/kAuRvFSUS3OgI2hoW/q3gkgKwKOjRCdSI9Y6Y11KArFPjFnbTEVqVN46CYkOn5pIRBnHd3CoTLR97UcE6qhnJTON2MX66TsTBI2oxehQBLdIzqnr6VMqFWS73Vqj0bDVavVAr/FI3ChL2o/AQ4QH//q5iSD9iKAYfS0VFTulb/0s2q/26RxGpewzjj3l03FifIpoUxMYtYeU3WqdqX6k+hiviZPilJ+oDHRskUhzeVrJHMl3pyPGmSMzR/4cR1YqDx9mMkdmjnxJHHmOJ5/1gasojnnjsTNcFyNfDAajiYM5N4BUpCcCTvxcS+relPPGPkQDSPVfx6v/p/qW+l2pHF/IqCkS+vd+v/fTe6Ihp0iPelM55GYPX3oYZaT1xLHRXurN2xFwkKemgaico7HznaZZRAdO9TE1huio1BFtLxJraozatvZN89q1LtVHHKeSqUbPYtsq+wMBVUrvMzlnkwoo9FP9NpXuEstj+nbK56MOUv6lk5JIwDHyd67O+HdsL/riuXFHHErpUeuNfT/Vlzg+lZf6SSqIo/iamkClxhX/TsnslEx+zi5yOV8yR97fmzkyc+Sv4Ejev7UL12nbKvvMkZkjXxJH/uoHrqggyqn0k1M5xbGeuNyZ
GiT3R5Ki6OlBkShSS+lxHCkBnhN0ivxiZDjKKVWnbvolatdsNq3RaJRIhbFpnnEcp7YJIGs0RyOcmt9OJCY1lpjjG/Wpf3OMqt4byRpgUyJVsABsU7KP9qPH68axpyYvSmQqG+0PfUnl7Mc2YhRQI3nadwUP7uNHZaa/U+RK9Etlrn1OpQ1w7Wq18rZiikSciKSIQ/UU24sAGOukrmhfqXZPRd2iDZ0rp/w29jE15lhPSqeRqM+RnU4gYqQz1Wez40rEKQLQz1L4Eu1WJxqpvp2S52NIJZdyyRx5/NtHXMhnmSMf3Js5Ms2RxX1KYe1et5kjM0dSXgpHniv/LYdm0DE1OgXXmK4QN/zqMni8J2WUpz5LkYaWc2kyKcDSKFkEUD6LjkY78YleFag52tVq1ZrNppNFo9GwdrttjUbDP+M6DIwf6qU9Tqqhz6loJ5+rzHTMOh4F2jiGmBKgslNHiPKOIJ+S7SkAoo+RaJABdqXyRVaAsbahJBl1C7mrvFSmOnZNk6AtHqQgMCYKTARiFDMVdYtAtF6vH7w/JF5/iszobySk1CptlG0KaDVKec5PddKn/p0iDW1P/TjlQ/q/YgV9i+Oi3tRENmWjKstYb5zARZlpH6NstU09vYsxqH70nlP6TMmdopPIVEn55ymyzeXpJXOk+aEHhT3EzMyRmSN/jiOj/WtfMkdmjtR7/lwcea786geuVOOnOsv1+j0bJFNPrwpAKUXp4BSwaQehpwglJWwFFdqJjq1GEiORWrdeF50AcMOZ+b9Wq1mlUilF6jRapySiBKEv4wPY9/t96d0WOp748j76gRypNzoE9akhx6ibjpn7dSleAZvx6jtStI7YfmxP9RMBVUGee1L9jtEmAD/acZxExB/VrwIM+o7EgS5rtZptNhvbbDYlnajelBC4TyO256J7fJ8CCrWZCBQq8xj1SYEt+oyTyBQRpPxDJyrRx/ENHUsEzWgT50okD5WB3puSm5JbSvexnIrK6SRSyVQjeupXkXCj3WtfTk0OI1bFicYpmeoYTq3K5HK6ZI5MR4QzR2aOfCpHuklmjswc+UI58txD17OtcKVAiA6dEnjsfFRmyjD1HhVy7AdCi0AfASr1WcphUvcpmGg/UkqM92iEEkDl75TcogERBVLHihMjBVvq1vY1ehIdMBpcHCsEoNEYro/57wqeAIPqJ+qCvmh6go5DSZ++60lGXBfTOmKkL+pQnZzIGMSdIjb9zRvnmbzEPG6iMNFe6Sd9YyxaN3XSN6I8yEEjp0o+cbKlhKSy10gjsqKgRV8AACAASURBVIqTqdSEiT4qEXN/qv14X7xOQVrTeJSko71pSYFhBM8UaareaeccuaTuPxURVnmkxh3/1nq1zRSGniL1FAaqjFJ/Rzmm2s7l15XMkTIpsXJ7mSMzRz6GIy2BcfyfOTJzJHXFa1L3xLZURqm/oxxTbf9cebYHLrOyYNWx4/f6P0as4KbXqtHH5T6U/XPRuZQjqLGo4UYl6v2pp+JT3ym4qqFE49b2FHD1M/2ce2IEjz6QIqCGgJOz5K9koiSkkREFWa1f/1f5RuKLUTb9n/7TpsovAh3tcZ86CqdM6Sbk2FftJ58R3VTAj7pRGWu6hwJftEn6wwsxVR7USRQvkjvfVatVW61WbtdKHhFQNKKjvqc2F2UY5axyiakTKRtWoDezpGx0khIBPeX/aov6Q6Ffqt9T9gfxxsmPth0nELQRv1ebSGGX6jhlP6dIQGWktnQK3NVHdCyPBXnqUD+KfhEx6hTJPqXNXB6Wv3WO3PGZZY7MHHm0s8dyZHGflLqXh6PMkZkj/1I48tkeuLRDugwYFa9OEU+yicCi9Z57io3RomjEKSXr/ZFIuE7rinXTB5wvOqk6j7YNQMR7kJmOkeV0BUpy1qkbAFOQTNWpOdH6HVEv+mV2jERxn5JufKmbyi2CnIIRMqbvyENBVevTz7REHQHcCti88FIjRuic/qve0BkPOmoz1KERR+0n8lG97vf70lG3OiFYLBYPAFPtv1qtev+V+HViEU/cUsdXkkiBJeOP9oluU8fdRr/U/SR8p31R/W+3W7e51AQt5StqL2pD8ZpzRKHXRp9X/NF+6HhTABvb0TGfwprUNSndY2MUHaeSt+pXfytRnCLdqCfVlX4ffS/aWy6/rGSOLNtu5sjMkU/lSOUz6s4cmTnyJXHkufJsD1zRKGKJSlOhRqEDBPqyQrOHIKV1RvBKOZf2NfYrBVzaTxWwtqvEeWp5UZ9+NYe3KAprNBqlJ2ttKzoQYLBer0ttkKu82Ww8v1plQRSKOgFg2or7AxhfjDCwwfiUw9IeY61UjrnnyEGNmH4hl5Qu1En4nzGpfhWsFIggXNrnvuhwRVE8yK/mfo086jWQiNYRfQDdoyP0EyNyGllEfpCSTjqUZKg35r5H3TJp0DFtt1snXfpFekjKJ1UmGi1EPimSJUrJ53GSF9tR248gGIFPxxgBOgXsGvlKjUv7ovdHAkiRSuxHSp8Rk6hDj1aOEzWK2nOK/FQmp6J7UW76ufqD6kKL9jOXX1YyRx4nnpkjM0f+Eo7UsWaOzBxJeUkcea486x6uKGhKBCX9PBqqgo1+p+kXUUgRzPRJN16nJWWgsZ8p4wEoqVOXrVPkxzXarkbTiJYBrgrC/B+fqCuVimwkPfYRUFHgpi42GGs/tM+kswFiERhTERTVYwRGPtN0FOQS5YuxpjYHR12qPBUkYxRQCU37rdHK3W5nm82mFMXTDcyqwxgZU1tIEQGkjU1qtCiVMkHbZuWN8lo/0dvJZOKkGlMOlFSVyBWUlDQiUUTg0Cikjgc70c3JEUAZN/ae0r3+jmSmkTRtI/pyjAzq30xOTxGG2lVsI+IV10XMUJuMRBzHGb9TsjqFO7HECV2sW3Vwrk+p76JcFet+jlByOV0yRxZWVMo+mjkycyR9fyxHUkfmyMyRsU9a/lwcea48ywMXRkCHYqejA2qnThmFXqeCjoNLKUD7pMarkSXtTzQsBYSYR61OFY0nFSlUQqhWq9Zut0vOZ2bWarUcSGMdSia0ud1uHUT1ZBoATyN15KTztzqE9g9QVb0QVdOIVVy2VTCM5GF2PH0JcNQ+Uq8up3M9Y1KC0OjnarUqAZQ6hAJmjAyqLehYFLg1BSLaa4z4KcnxvcpUr8OeqtXDscakJ8RDMrbbwxvuYzv1et3fC9JqtWy1WrlcVUYxSkW7KT/U3/pzarxqOzp5Ud85Ra4poIwTEb6HzDWyrORN0cg0E6tIAlpv/P8cePN9KkKv8vk5oopYkro22vop8I661b91YqETPo3oYes6aYikltJzxI1cnlYyR95zZPHQjjJHZo58LEdStruH+7AyR2aOpC7Kn4Mjz5VneeBSBUdDjAqIRLHb7dwhzI7Ly+RNU48KJ9YVC4pXJauSlGSiMUewiak0EbQUzGL/AGIAfbfb2Wq1cuKo1+t+nC338mb0VqtljUaj9D2gEwGC76JsaEMJCdlEQ1IA1EgR7VWrVW9Dr1eDIyKmEUXq1Hp1qV6X/jlSV0E6RQqRyBQoo45oL44
RWWpEilSUmIpBPaQ6qEOrfJAXMqbf9D1eW6vVPFqKDoui8MmFRmDq9brt93v/zsxKaTMKJEqGcYzaP/VDzU9X0FZi17z01G8lTLUJlVskEp0QUBT81BbUv9QO9boIihEoGWe0k5i+orakaQwRc7TfjJ9rFNhpR+1WdRQJKMo3kojKREmLz2J/1J5V/9pOHJPiiUaEc3l6yRxZth/8NXNk5sincOT+/nRL3sOWOTJzpOpL7e3PxZGph0DKszxwqdNFw6CzKQWoc0dhKpFwrYKn1p0yYOrWiJEKMSUY/UyfdOMY+E77pk/KkAH93W63tlqtzOzhQxifmZk1Gg3rdDrWbDZLBsLSOtfh9Jp+xnjpU6PRSEZGVDaqC8al7Sow6Y/2WQ1XCQu5aN5zdHjVF/Vpe6moDEXTQaLs1Qb0PvQfiVivZ+w6GUHG6/W65JzUr+Adoznq+JvNprQhV/tIUfmxygWZcC+yiu0TyYo6UbslNSdFzKmJBO3pA6HqV+UXJxJKrlEPagtRzmZHouS7er1eWgnUvishRpmrPGIUDZnESWoE/Ig5yIo2wRjtk06C+Ju6I+FR9O9ToB3JKZboW1qv6lk/j3gdJ2d6rWJiLo8vmSPDClmlyBxpmSOfypEFr86+bzNzZObIWP7cHKl9jOVZHrjUsU6BU+ppUwWj1+iTdErZDAqgLoryE7YamRpyVN6pp3X9jChTVIwqSI2Yz5fLZemaZrPp4Iwj8+LGer1u7XbbP99ut9br9ew3v/mNfffdd9Zut+3HH3+0f//3f7fFYlEiOGSgYKZEBcCYPdxDEJ1MI02RgKhXgYrf0cCxA42UpCYCati0QxqE2ouCZGoioGPnOyVerUvlxn38DYDxv/Zd88nVbuJnKfvQCYGCkxIi7TSbTf+b/kPK1Ae5YD+LxaKkNyJ9OgZIvdlsPojCqt1qSo1OAoimqw4AeE6uon6NDqtNqP1EffC/7hWgAMpx8qb2HCdHsW2dZFB/TFPR9qgrVU+sP9pUnLRq27FEoNfrtE5KjLohd+2bErrKT3VHWzEyeQ7ncvnlJXPkPY7w2W6fOTJzZKlvj+FIVriq9ymD9D9zZOZIykvmyGd74FJnj8DCAKPDKVGogxFh0Dp1wDFCoYZFG5rnrManT+6RSCipHGk+p02tlxLBoiiKEnmwNByjGoy5Wq3ab3/7W/uHf/gH+8Mf/mD7/d5ub29ttVrZdDq1oigclGJ6idatBkP9qo/o0NE44+QAoEKH0eBjRNCsHGFSwjMzBzt17EhiKZJPRf60jagX7CmOHbBDvxGAAN5Yp/ZBZc7fCjjYIP+nUltixAjZIFNIkToqlYqt12uP5HU6He+LApXqSu09BcpaaFvHovfqJEGjfDH1Zb/f+8liGtlS3ahe9HsFT+Svk1RkoZOAKMMI7IwpgrRGXeMkKeocm9C24qRV+6WpMtQTT5SLslHdqM3q2PRv1YHaoH6+2WySJKbRR/4/RRwR43J5eskceV9fpRyhzhyZOfIpHMl4asI1mSMzR+rY9O8/B0eeK8/ywBUjC/rkbnZMdYgPJHFyyQBZJjZ7CNwp41MQVINKkZBZOZKk4BQdPxJJXP7EgbSvqrho1ACmRpr4+5/+6Z/s7//+763f79uPP/5o//Iv/2I//PCDLRYLjwSqvFSuRXGIBOLARGAiaGj6hjq3Gnw0WnQDweu9Wn+MBjEu+qqOTV9iVDE6VrxG/1Y5KjjpeE9NDk+NFdtdrVYlmcT+UreStYKjjol+Rp9AlpvNxnUMeTD2Tqdj0+nUOp2OdTodu7m5cVtUG2y326UJmOb/0xfVJaCmUb5o6wqk3KfXq5zVVhhzBGUliEhUUW9cx+lO0cdSE03NBVe/0/aV5Ima6+Z89XvaUhtQn009hChB8XnEGu1LnHxFIipNVAOWads6ZnAiNZmIPqj60PqQEW2p/vID1y8rmSPv+3p/aIYV6Y3omSMzR2p7kSN326OsMkdmjnyJHHmuPNux8FrikzIlGqFGycweOr9+HzfxaUkNWBWsf6sxRLA3e/geE75PRQKJuKWAiToU/NkAyj0AVK1Ws6urK/vXf/1X+7d/+ze7ubmxRqNh7XbbWq2WG308zY46iPAAsnqiURyjgp0akkYmNF2FehW4ozHzE50X/aXu0z7xvYJYrAuZK3BrH2KEIqVbnYzo/xR9maVGCrUuPo9AoxMLnbholFUjJKvVylarleuVSI7aq5n5BIHorRJ4tXpIR9ntdn6d6leBV8kiZZ+aFx7ll4qWoQddmtdIo0bzVA6pyZBGmKhXI1unwD1GxtS3VVdqc2bHHPhT+JMiv0i4gL/qIfbp1ORXr4v/077+rf1XncUJt7bD2JmYoyd8W+0jtqP/Rz/N5XnK3zpHmowzc2TmSLNHcuR9SmFFuCVzZOZI7f9L5shnSymMf2tH4/8KIAoC6rx6T8ogSuB9XzQ6pU/R2gfa0TZV+WqoKkSNDmrkTu/jJ0acUCKGx/Uodj6f2z//8z+bmdlgMLBut2tmx/dnEP3RcQPS2p7KVMeu7aosAKKoS34igSuwqmNHMj1lfEo8tKsRIZUJ12jEyaxMempH9EntQyNmGkVkIhDHmwIuBVO1S340OkPbeuIQ7en/6/Xao4TL5dL3HGgkstFo2Hg8tk6n4/cowDQajQd2eMrG6Dt9UJDX9mazmY9Dc8lVzwpkCqbUlYroQlYK9FqfpmJEH1LggzQiwAGUUWdxskI/sX1NL1Abir6iE5nYR5VRtBHGq2Cv/sTf6lcpbNLr9EfxRyN1vNxVJ+JFcXxvDf7AGDQNLEZd1W/i5CuXx5fMkff3hQlT5sjMkU/hSOqvVg4Hk2WOzBz50jjyXHn2Y+EpCmwqoNQ1Kkw+N7OS4er30WhP9SGShD4I6XfRcFLGFIEVwWt0hrxg7o1P6MhDjQtgI92hKArfYKnGFx2bCJ2mRmi/tZ0of61bgfLUpED1RElFy2K0BLmoU+jn6syqG0okFtWZAqL2/1zUmO91NUnJF4KJS9yxnthW3EAdc99Vz+v1uvSzXC5ts9n40cYAY61W8827rVbLms1m6QSoRqNhy+XS+6MTiGq1WkqXiPbPb6JOajtMUrDt1CRBbUHBDmLQFAedjEFumpag0djod4wzRq11MsJ9qh+1pdSEVidDii/UEfvAGGKELdrZOQKOQKyEqv/HvurkIO53SPmD6tDsGI2lPsbB5Ez9SmUY8VgntLk8vWSOvPdvmdCluCdzZPnzzJFljqxV7/e9VTJHUmfmyJfFkXEcWp4tpTAKBUXpdxEUtaMUBSQFCR2wGrTWo4bCdZE01Kli3059pkpSYFIgVIcByNV5tf9q/KokIi0K9lEmGBJH2uomTmRmdgQ4NYAUEOh4iuJ4ek/UoZ7AoyCl5ZSeVObxM3UElY/KPMow2oROXKJTxsiV6hiZY4Oa5qM6iEQWN5urLNAnsicHfb1ee3rEcrm05XLpefA8bJHqstls7O7uzt8Ps91urd1uO3nQtqbeqN1jI5pnT1ESV9mSz4z+GX8qhYVxa7tRtuq7Uc/6fpcUycVr9L
[... remainder of base64-encoded PNG image data omitted ...]\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {
+ "tags": [],
+ "needs_background": "light"
+ }
+ },
+ {
+ "output_type": "display_data",
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAA1wAAAFDCAYAAAAu+g+jAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOy9ebht21nW+Y3dnHNvSEsSQqNRkBIsKEQEAS1ERR+KVAC1RCwCCFEioKKGLtIEiCARpC2waKURKjRGKXrFQvQBCRRYYBAsH4qQgIQQEiMhhHt2M+uPtd69f+vd75h7n3P2DbnnfN/znLPXmnM0X/+O+c0x5xrLslRTU1NTU1NTU1NTU1PT9dPe7zQDTU1NTU1NTU1NTU1N9yr1BVdTU1NTU1NTU1NTU9PDRH3B1dTU1NTU1NTU1NTU9DBRX3A1NTU1NTU1NTU1NTU9TNQXXE1NTU1NTU1NTU1NTQ8T9QVXU1NTU1NTU1NTU1PTw0R9wdXU1NTU1NTU1NTU1PQwUV9wNb1R0xjjF8cYrx9j/Cb+veUdjvURY4wfvmb+PmKMcbLl6zfGGD89xnj6JX0eO8b44jHGy7b9/r/t9ydtz//iGOPXxhhvgj5/dYzxQ/i+jDFePMbYw7HPHmN8/XXK19TU1NT0xkFjjA8ZY/zEFjdePsb4vjHG/3gN4379GOOzr4nHS8fa4tfrtnL8lzHGF44x9i/pM5V9jPGZ2zH/ItofbI/9XvC1jDH+CNq87Rijf4y26Q1CfcHV9Eig91+W5dH49yu/E0yMMQ4mp350WZZHV9Xjq+ofVdW3jDEePxnjRlX9X1X1DlX1P1XVY6vqPavqVVX1R9B0v6r+1iUsvWVV/aUrC9DU1NTU9IikMcazq+qLq+rvV9VTquqptcGbD/yd5Osu6A9ucfO9q+qDq+qZs4ZXlP3VVfVZl1y4vbqqruXCsqnpdqkvuJoecTTGeMIY47vHGK8cY/zX7effhfMfMcb4hTHGa8cYLxljPGOM8Qeq6iuq6j23FbLXbNveHGP8w+3dpleMMb5ijPHg9tyfGGP88hjjk8cYv1pVX7fG17Isp1X1T6rqTarqv5s0+/DagMWfW5blZ5dlOV2W5deWZfl7y7J8L9p9flV9wuzCbUufVxuAmV0INjU1NTU9wmmM8biqel5V/fVlWf7ZsiyvW5blaFmW71qW5RO3bW5ud0r8yvbfF48xbm7PCcs+frt74uVjjI/cnntWVT2jqj5pi43ftT3+lmOMF25x9iVjjI/bHn/T7Vjvv/3+6DHGz48xPnw21hoty/LzVfUjVfXOdyr7lr6/qm5V1YeuTPcNVfVOY4z3voyvpqbrpr7ganok0l5tLn5+T20uXl5fVV9WVbXdhvelVfV+y7I8pqr+aFX91LIsP1dVH13bu1HLsuhC5vlV9ftrk+zftqreqqqei7nevKredDvXs9aY2lbWPrKqjqrqpZNmf7qqvn9Zlt+8RMafqKofqqpPWGnzz6rqN6rqIy4Zq6mpqanpkUvvWVUPVNU/X2nzqVX1HrXBsj9Ymx0Tn4bzb15Vj6sNxv2VqvryMcYTlmX5qqr65qr6vC02vv92q/p3VdVPb9u/T1X97THG+y7L8ura3I366jHGm1XVF9UGY78xjXWZYGOMt6+q96qqn78L2auqlqr69Kr6jDHG4aTNb9XmLtnnXMZXU9N1U19wNT0S6DvGGK/Z/vuOZVletSzLC5dl+a1lWV5bm+TJitVpVb3jGOPBZVlevizLf0yDjjFGbS6i/s6yLK/ejvX3a3eb3mlVfcayLA8ty/L6CX/vsb1j9ttV9Q+r6kOXZfm1SdsnVtXLryj3c6vqb44xnjw5L4D59O1Wxaampqame4+eWFW/vizL8UqbZ1TV87Y7Jl5ZVZ9VVR+G80fb80fb3RS/WVVvNxnr3arqycuyPG9ZllvLsvxCVX11bbFxWZZ/WVXfXpvt8U+rqr92BzL9+zHG66rq52pTXPxHk3ZXkb22fH1nVb2yqv7qSrOvrKqnjjHe7/bYbWq6O+oLrqZHAv3ZZVkev/33Z8cYjxpjfOUY46VjjN+oqn9bVY8fY+wvy/K62uwH/+iqevkY43u2FbRET66qR1XVT+qCrjbbEniB88plWX77Ev5etL1j9oSq+s7aVOtqjPHUgZd9bNu+qqre4ipCL8vyM1X13VX1nJU231tVv1x3BnhNTU1NTW/89KqqetIl28ffsnZ3Vrx0e+xsDLto+a2qevRkrN9TVW+JQudrqupTavP8lOirquodq+rrl2V51RXlIL3Ldv4Prqp3r81W/Nq+DEO4+Yy6muykT6vN3b4H0sllWR6qqr+3/dfU9AajvuBqeiTSx9emMvfuy7I8tqr++Pb4qKpaluVfLMvyZ2pzYfOfalOZq9rcESL9em22I74DLuget32QtyZ9prTdJvgxVfVhY4w/tCzLy/iyj22zf1VV7zvwBsJL6DOq6qNqs61jRp9aGzB81FV5bWpqamp6xNCPVtVDVfVnV9r8Sm0ulERP3R67CjnO/VJVvQS4+PhlWR6zLMvTqs62z39VVX1jVX3sGONtV8aaT7qhb6uNfM/dHns/4OY319Vk55g/UJvtiR+70uzravOSqz9/VV6bmu6W+oKr6ZFIj6nNhdJrxhhvWpuLkqqqGmM8ZYzxgdsLmodqs23idHv6FVX1u7T9bvuSi6+uqi/a7kWvMcZbjTHe904Z2+5v/5rafQ6M9E9qA2YvHGO8/Rhjb4zxxDHGp4wxnhbG+/mq+taq+riVOX+oqn6mqv7ynfLd1NTU1PTGScuy/LfaYMqXjzG0y+NwjPF+Y4zP2zZ7QVV92hjjyWPzEyPPrapvuuIUr6iqt8H3H6+q125fGPXgGGN/jPGOY4x3257/lNpcWD2zNi94+sZx/nZAH+sq9Pyq+qgxxpv7iSvK7vSpVfVJs8m2d/o+o6o++Tb5bGq6Y+oLrqZHIn1xVT1YmztUL6rNNkDRXlU9uzaVvVfX5tmuj9me+8Gq+o9V9atjjF/fHvvk2lTDXrTdnvivar6v/Xb4e9oY4538xHY7w5+uzZ23H6jNSy9+vKqeVFU/NhnvebXdbrFCn1abl3s0NTU1Nd1jtCzLF9QG2z6tNs8p/VJV/Y2q+o5tk8+uzcuW/kNVvbiq/n1d/RXoX1tV/z2ekz6pqqfX5gUcL6kN1n5NVT1ujPGHt3x8+LbdP6jNxddz0lhXlO3FtXk04BMn5y+T3dv/SG1wdY1eUFd/nrqp6a5pLEv/5ltTU1NTU1NTU1NTU9PDQX2Hq6mpqampqampqamp6WGivuBqampqampqampqamp6mKgvuJqampqampqampqamh4m6guupqampqampqampqamh4mu+kNyUxpjLPb97POyLGff08s5vO3a+bXxxhgXvrMNz8/4uR1eZuP6HN428e3yzc41PbxE32q6XlJsePxel77X4s6Pr/UnrcXzLG6TbJflh8Rryk1r+e2y8a8i64zvCf36sixPXmvQdE6NkY2
R9wI1Rj581Bh5b2HksixxgitdcI0x3mNZlhfNzu/v7+/8dUb29vbq5OTkQr+9vfMbbHK2MPfZ+fT59PR0tQ0deW9vb+e7jy+eTk9Pi8S5xLPzsLe3twoy7pAci3yyDed2fk5OTs76jTHOeHb+xxhn+ldbzpEc1XWqcdWWvHMs6oD653icI/GleU5PT3d0QBkpp/5yDPej5Fve/vT0tPb393fm2d/fP/vsflRVF+TiOerj5OTkbOy9vb06Pj6+oMvkz5RFvLis7pMuo8j76Rhj1u2a4ob6cJCY2cHPeQy6r1M2nyvFp8s+ixnnVTY5Pj6uZVnOvqcxE58zPaW+LvP+/n6dnJzs+Dvbuq+n3DiztwP3LOc5bylveR6tqjo+Pn7pBWbuY7oMH6saIxsjGyOpA9dHY2Rj5L2CkcfHxxf4OBsnMXk7tLe3t8ghnTkyUnXROSS49zk4OLgAPjMw4Rg+H3g8S04MvOTEyYF8XhnXEw3l59xMLB5A3scTHw19FT5d17NA5bjJLuyrwFMA+JgEMrenbCkbpISV9OZA4u2UoKkzAsxa8CSd8+/+/v5ZcpF8Pp77sAcwSYl0WZa6ceNGHR0dRaD0xYH0R1tStlnyoJ9qHPcfjuk6pq7dpvQVjX9yclIHBwd1fHy8k1BddsYOvytB7e3t7Sw8leg9yeuc653y08fEwyxBc8Gk9vpO27iOPQ/ouPyHx2hrzuE+MLOj2jD+Up5wHXEc8k2ZqJME9Jx7+/cnl2V512q6EjVGNkY2RjZGNkbW2fF7GSO3PMaAupYLroODg51J3ZlmAtNoqb2CWYKkMaqysXjcq0RrMqfqEeed8UpnpVFmAe9zMslqHFZ2ZomW4/IvHcZlmAGzJ18mVAFCAhLnz5O5JzUnT1LOL/U9xrgQrF7NSaDmcztIK0i9AqM2rOTN+HZ9uy6VLJU8UyKnvr0/E9hlfsw+GodgwsrirDqUxlLfqotAwcUH7eQANItf1yNtQ13oL4/7uA6OySdnCX8tbjkux6DtU2ylOJFuaHMHRY9N54HtuQCfVXRJKe41drKZjvUF1+1RY+T5+cbIc3kaI8/baKzGyMbIRzpGbuMkXnDd9TNcZKpq90qZiVfn2N4V7QLRkDMg8NunDHDOsZbIdZ7OlozgRmSweJLjX+fBAWvmNDP9iRKPKXHrc3LoqnOg8MSnc+5gnMvt5pUu8k2g80THv05sR3t7YNKWngwSoHhC8WqgyBcKvtDgOFy4OIAk/6c8XtlyHaUqoldh6DM+BkmVNk+I9M0EXJ5oUpynqpaIAJBiWhVeXzA4UXfJf9aSr+ZJY3q/2bhegZ7Jc5U8MMtNass84zkx8c1jvpiazZPkcznW+Gxap8bIxsjGyMZI10dj5L2HkWt0LRdcSRnJ0AwcKtkTur67U3tSSnP7lT558AQgx/VxkqNRybM+dCZ99wSh9ry1nBKv6zAFh8bnLdRU5WSScHlSALgD+pzkU98FSDrHROoyuGMq+Tq5LGOMnUStPkx6HC9VMCk//60lL+oiPWeR9MKx3T7kl8CUAJZjz6qWngBmiwxf6HiFxvmm3RKI0qfdH9y/Ul/Guujg4OBsO0kCS6c1wKTvcsHktvCYnQE5x1yTl3xRT/IzbTtYizuCuqq9qiDrvNpyzzj58/hdozXf85zadPvUGNkY2RjZGOl863tj5P2Bkdd2wZWS0ywZUlkpOLUnlZVAju9KmlWlyI8CcQYYHkwcKzkMkyfl0nypEsN5JVPa2+9yOhDxPPf2+nxMMjPZ3JmdF9f7Wv+0QPCHX5kQGRzuwGvBmWzt/KXFjSccBmZV7Ty463JoDvkrj9OXfMGiY2lRdBml4E4y61xK9L5oolxcGKkNYzeBTlp80IazpOR24dj0UybGGcgS8AhWszzgfbnQY9tUsSKIehy5/9K2KZnLf5K/+0I5ycLqXapgM7d5jCbdez/PN66PNVBvWqfGyHO5GiMbIzmnjjVGNkbeCxi5RtdywTW7belBQ6E88blR0xzqQydYczhXUkpM5E2UDCGSTLzVPEv2HHctwfucpFR98lu1/jnp1G3jlIDP+9DOTDqcY81R1Z8BKqf1MTwZpoWeB57LnGRIbaqynikndbCmR78F7kHPyh3jhDpgkvfk7eOQPAZm/u6+SPm9n/up62c2ZkrULpPGTQAw45M0S3YEmJQL/E1xLjvtlvjV3GvnRQlkeVzkb/tK+ZTjU2/pPH3K8+5l5LLpmMZJFwBN69QY2RjJeUmNkY2RM5k0bmPkIwsjp/0uHfmK5AZISZIK57+qDDwJVDi2j8ME5ELzvPPtbTiHeHNnmDk62/pYNL7z44Gfqlr6p0pFSv6zAEs6ZxsfJyVh1xflmNnzMn273n1+Vk4ceBwAZ4Gc+OfCRJQSp9tztu0k9aONkj7dR1w+3/4gnTpoUGeeYHyBwc/UXYoX13/aUuFAqzaSn4la592HZmDievGElhKr68ftIXl50eB9vZI2i9kEjtLD/v7+hfyVbJMAw7+7H7nNmS8SmHsspRhOPHARd9WKc9OcGiMvtm2MbIxsjGyMnMl6r2HkXV9wUfn6rluCblCdXwMaGs0F8GSsY+6kKaGv8a+/7hTpuxzFHcINQX4YYN42AeBlCcJ1xnEkewJn/04n9OOeoKlzB+5EDjRecU06Sf5APXDuFDQaj/q4ik+kxOg8uO1cB66HtSpfsnXiMSVt5z+18WTreqT+PaE5yNLmHCPxPauy+nYN5yXpOyVMB5IZAPrcslGyv8c/27g9xhgXYl9/WdX3yp/LnT579Tj5jeeKmSyXxVKKaW/r+khtm65GjZGNkYkaIxsjeb4xci4j+Zz1I49vrBh5bXe4nBFndmZICc4ASz+ulgAozZNuI6uNK5LnaCC2938OdDNnSYGVEqTasNqT5vd+LrPG8LlS8rwsaXvCoTy0l8afjbmW9GbbARI4OV9rSZ102ZaDNJ5XLNw3ZuA7O0Z9efUrLR54Xsc8Get3R1JlzPuTqHeO71s8Eg+zMdO5GVhq/tlCR3/5zEaK9wT+1HUCmSSXj8eE7n1crpRgqd9ZxW1WdXfbrOW7JIPLTD2zjXLrbMHhlEC/6c6pMbIx0ucWNUY2RjZG3tsYeW0XXJxsFriJydTfyQOZiXeWLBkkCQwSf86TO5cnwjVZUiXGyUGA48+cxXWx5vA+T+LZ5Sf/azJS1pQ82S9VitLWGq/kkR9/kJK686BlsKZb2ldJQm43X2wkANFCxu3nt7KTvb2PA13inTqjHlMyFX/pOB9yTnJ6+8Qrq2O0rW89oA5cvpn+UwXO25CPGd9+bKYrPz5r4xW3NQBIwOM2cd9O/ZM+Z7E506cWKPSzNXv756Y7o8bIxsjGyMbIxsj7FyOv5aUZoqtWzkSz4PFbj8nRZkDE9inZ+nFXZlLumsM5iLqR19qmZERDezXOgXnGF/v45xRsyRGpS+eX8vEckyn1riTlP1rn89D2Tp600vYF5zMBhstOn/Xb0AQ+l5UyuN+neXjc28+SB/slflKssb/rmck2AQb1n+TVea9i+SJgFp++v5zzaFx9dh
0kfXrcphhLslMe92OO61VN9zmXOS2aUpJO8UufcN0lvXEsl8d5IaV8ugacMz9rujNqjGyMbIxsjGyMvD8x8q4vuNwAniBnbeU8SfCUyD0xJPKA83k9WFLic34ZuDKCQM6rMF5dIF8MQL/qJ+8ug+/jJh/u6LPb3pSFeqfunQhslwWp/nqCYgCnW9Ep6TvPM0c/PT09e+jSk3/ih3pxH5Ueks+wsnUVH/SkM8Y4e4WzA2aqQCY/GmOc/TbFWtLRmD5G8jvala9u5Vhug7Tooj2cJ/bn+CTy4fqfASt1SOJDvvTtlLwpj8dA8p3EV6qG+mevzjnfkl/HZWf6j/NGO6Rc4218scKcwfNp3FlubLo6NUY2RupvY2RjZGPk7uf7CSOv9S2FZFhMXuVWfXI0T0z86/OINBd/hCzdlnfn9vmTESkLA9Tl4w/4uXOdnp5e+AHDpAc6uTuCV5huh9RvxiMdOtlDCTEl4xnY8Dznrbr4hiHy41tdPIkmJ2cSmN3Kph5IvsggjwmYk4+kYGRwU/7LwJRg7H48m3eWsDWvg5fzshaDPifHmNmDAJF05nrm5zV/m+mCn9cSvcuY/CHJnEDRF28+psf0TE9M8LQ7z1+mb6ekv5k+kj+zXfLDptujxsg6m7sxsjGyMbIx8l7EyLX57voOFydLyf2yRMPPVJBvHXDHdKIyfBxVejhX4snHSD/c6HPOZHY+ZqDm8lblvduz5KVjDgCpLXlmAnFH0fxenVT/NJdXoBQQrED6g6wC1mU5/xV7zrmmb352nXNLBeWnjj1w1V5t/AcemQxnAJoSi59z8HawdB9mu1SNYkxI3z6Px4CqddK59O/6Ij/Oq+amfb1i6PwlWzoopUqa+EmJ32PJ84svxhzUEgC7vqkPtyeJOpzlNo8/b0seyL8DiCd59wMtXmaLrssWU2kB5fptuho1RjZGNkY2RtIOjZH3Lkau0bXc4ZIwqjbpH4WW44lB3qqjorxixe8USEktOWm6/effZ8oRn+4USjBejSBv7qx0oL2981flkj//y4TGc0nnnqAODg4uyMz2KfA8WNMclyUyHvfKabr1L13we9W5L1A/s2qJEqDriLea3fZeSdF2C8pIG7nsnkxYHXFQTtUoD3xf4OjvDKwc7Ge+7JXPBDQEEreh257nZhXs2aInJUDZKSV3p6vInIDa5/bjSrYEMi50Lls0kD/19Xh22ROA0C+8rcdOiusEcGmLzwwcGEPuM/Jx2neWj5rWqTGyMbIxsjGS/XzMxsh7AyPX6K4vuDy5UGkyDpXhb9Jx4jkal3O5ATkWkzEDOrXlmLPvVKKSTQIo3wrgsvu46Ze80+cZ0clk7OPj47OxPTkleajn5OQOkBorBZ1XcpLsPKbKqAOaE9t4JcirTRo/VT+Z6F03x8fHF5KWeEzJRjx4gvCFhvsDk2+qgPKfg14CG+rCFy6Um8f9PMdMCyC3ifsAK1auG68kroGG+tO3KLMn8EQzP3cdn5yc7Cx6eJ4JWCR7pd8XIZ9abLo+PZbU3p9ZSPy7HVJ8avGiv6ma7XqmTn2sGZA23Rk1RjZGNkY2Rrr8jZH3H0aOuwXUMcYiRScDudNTCH3e29s7uz3tQcQx9JkCJ6BxB/cg0XF996Qso7Cd2vq8zpP30bieXDj3ZeDq1SPXseva+aUe0jgEAc1HUJ45sldUJCtlkg0dgCiDB5+DhvPtfKjP/v7+GaBSJ/KxNK4HMQOPyV8yOD8ur7fTHLNkSF5mvuBJSHMQZPy8Jyyvlnk/Aqb6Men4go3yOaUYcd2xHbcl0U5rlOSd5R1Pui6Xx4jnKJ+X8iR/mYGCANYr4e77sznXcgxznuexMc4fSnceZnxybl80HB8f/+SyLO9aTVeixsjGyMbIxkhSY+S9i5Hb4kRMVtfyWnh3BAeQ5ED+3bc4pKSSBPZAk3OqXbrV7UGbDCSeXKbU3vlzQ/EcHZr6E6CmOdwB2ZaOIyD1oBHNkgB14ufTYmAWxOLLt5CwD9v7w9Gca1Ztdf1wzOPj46ifmewMNvKo/gxyghQrj8kHCMTkZ+YXDjhMCgICT3z0oaRvkQO4+KOe5X/UD5MRdeTA6LaTrMmH2Jbt6bOM37SNQDxSN6JkR18MOi/U98xP3Sb8m3zU45UxQB+gLS5L7mkBxdzHWOJnju/5bS3eddz1dhWgb7pIjZGNkeKlMbIxsjHy3sXINbrW18K7kLOEROUzyGbG9KtS/8w5HDy8DR3dHcL5VELzyqTz6cCkIGVSIbFKoD5eXfOkRBmSYV1eD7KU/Hx+t40HInUufnXMnZtJ3BOWj5fmT9WcGfBoPlYYfC4PCH99LJOg+D44OLiwh5zJNY05xth5ToA2pI3d1owXJUn3dQ/0tACbLdxUgSGflMv5ob0oi2SY7a1PiZlzpKSlMT2xejzys/TqvsZ+njxn+UnjJsBUW4JD+q0cAobHp/sJ84r8O1X1XGafJ7VZW4CxrfvUbIFInquuXl1tOqfGyMbIxsjzMRsjGyPvZYxco2t7aQaZSNUAMkrG01WwnFUJQuMl5/K+rhw6j9Ps1a+eKI6OjnYSgicFr254ok2GoEweTOKHAUu503heNT04ONjhKQUJ5xI/Xg0k8OqYPzTrenOeZUPKIB0QPHg7d20xwERCAJM9fR+xVwJ98SD98fzBwaYWofkoR5LRQUTtfF8ztwCJD3+w3RcqXs2b2d354ThMWr4NyRdC+px85ODgIFbFGPNqR5/zOPF5k8/N9Cy5ucUqJVOPewcfjkueuG888etv5vKxku00lr7zYXIHMre329ft4rSmwzXQS+08r1wGVE2ZGiMbI6k357kxsjGyMfLex8hr2VI4xvkrQhNTzgiP7e3tnd3mZtJSOxpjrcrngtJ4lwVLChx3Lg+MqotvnnHHpvMwYDgfE4Ta0+FnxuN4VedvXxLxIddU5anKe/T1V5Uwr5JqbPVfc9hZYnLdeCVhFqDiy4+Rj1mQO7FymKr21NHh4eGFfe+SyRdDfMZCbdRX86S33nB+Aqu++8LHdeoJlHJ5ghXfyR/4mQlvpj99djsopqvOfYlVXCY0r9B6ouPiTJ/l76xSp0ovZWRseUU+LaIoJ3ni3n3y5zHtMqqvx4b4ZF/Xs3Kst6HNSL7wEz/K09SF29d9mPw13Rk1RjZGkhojGyMbI+8/jLzrO1xikJUor0JVXawQUOGqcLgjzBxZ43ogeHB5hSF9JqVkNJt/Np504CBTddEJOD6v6lU5cj7ozCmJJ749aSS+GTjiXWAkB2ZV0sHHg01zqvLFW8EMbM3t/pLsmgJdCUUJaqYP8uyLhqoNODoPTMJHR0dnbZm8XH6+EtnncNskvekzZZ4thHxsH8fj0IGbMjl/7KN/vthIupxV5wjEHhvkTXz7+KycJ/2Rh+QDihfNzTdr0cdTbuLbmnSc/X3B5QnabeUJmp+TnPrsFWvqzeM6gbGIvHmVMrVvuntqjGyMbIxsjGyMbIy8ljtcVReV5gwxoei7G16GPD09PQMo9vGHTRPJKGl+Kc0rZEwcs4RbtVvR8uRER/TAo3MwsTFRe
wVEbVMySgGpz3TAdGVOPakN20rnbh8HkQRaPOZVI205SZUP2piANksO4i0lOVWDOa5XSGkLPtybdDSTOS10/JifV0XMF01rVVQHBLcNgZuySn8JxJdl2dnaMVtguB1ncnMu+p6Px7ben3PSH5INHHBdVl88cNFKQGFljHpIydSBzpOv5xOdd3+fLQA8Z3E+5RXXo8eciHGvMfhMQgI25yHZa5Zzm65GjZGNkY2RjZGNkfcvRt71HS4agEqUAvzBwRRsFNzfnMPPKTlKoQlkXBF0PPLon51Pd87UX/w7uHgCch6dVwYGg4Z6YvtULZolh9nxqtqpPPkYCdB8XPHCKiR1w3lcP+rLBDnj2feeu+40jiekBKoaz8FG333BQL2khQLldL15LKhtsmGaiwst94c1nnme9nEAo/4SSCbd+XG3rf/uDmWj/pzv1C/pfqZXjUuZPHcQSLiAnS1yuVgkD0gY+XwAACAASURBVH7cK8m+qKI8rg/3A8aBA7/L5z6RclUCBvdv/eVibG3bTNPl1BjZGNkY2RhZ1Ripce9XjLy2O1wiN8YsiF1o9vPEu5Yw9VntVL2RQR3gPMHreBqbTujKTE7nPLr8Hjg+pvOTrug5LoPI+fGHPpMjUT/UC/XjPDOhpSoL9ZaqgA6+5CMFnZO/PUiB7A+Hup0pB/klyDFRkHe2oz1YiaO+WYGVzKxqObCmhZD7PT/PFkyuU573pOd2pU4S+KaE5vPLJx2QaBvqIOUKVpFV7eTrh9cWNuncWi6ZgabrzGUVj+QpAb23nxFBraoulZn/Unz62IrFtVzj4wtcBe6XxWXT1akxclfuxsjGyMbIxsj7ASPv+oKLDpKYJdMzhpICpBwHA533uXiOD41eNka6Gq/a/R0JzuH9PXm5k4koOx2Azu1O6gnOZVijBMJ+fvZdfKdbvTPwSQ7vCZzjK3g8wXg79w0HJp5P22lSYiTP8hevlPhrc9cCmm3SgsD9MwEDdei6cUoLnLRQcB/1MVwun3MGpGkxxzlnFVSO6bJRd6yEcWGoY2nh4X7C4/TDWbXe+fJYnx3z3DLLNyTqM8UHgSQBly863Mdn8ceK3Mxv/Pva3Zemq1FjZGNkY+R5m8bIxsj7FSOv9YePxQQFT47nx5xZKYKKTEkljZ8AgvNe9oNzLpMnCDoex0q8qI/3dUrHOU+qtKR2SoxMWJxfPCfAS/rwgEg8ps8JXMQTg3uWrNN4SiACOE9wCQyTLJRb/pV0xURcVZFX6dITnGyQFgL87tVZkip7DrYzH54lv2QP6jTp7rLYYJuUxAlKnuySzyXgTYnLx521uwwgvc8MoNiHx9PdBNlk5veaJwHWLCekN3WR95TfOD7fpMRzrC47MCUdpnhquj1qjGyMbIxsjBQ1Ru7Odb9g5F0/wzWbJCmKgnj/FLQcZ83QNJIblEFNnviP1QE/lnjWPPq9BfLvt4op90xmgsZaUva+rhO/0k5/Z1WOFAjJVjPnS444S/AJGKWjpLs1x14LRp+P55J/eiC6H/h8ae4ZaDFxE0g5PnlOIJ+Ax0EkgYbLlxJ9sn/iRbzPbLzGo3xqtv3JdU0e0/gzWTkf/T4lYn6mLi4DqpSPUnvyc5Wx6SeM85QbRNKbg7PrMuk+ycbPa/M2XZ0aIxsjGyN39dAYeZHHxsh7GyOv5Q6XJx3/7LeH/Zj38yB1R3ClrVVw0pYHdxpevbO//qXbqj6my3qZM/Kz792dOWXSlYOEV/pSAkk0S+JJjhnIeeAz8bgeyQsfJHXZLgONNd2sJR7nmYsG6jTxNgNzT2CetCmTg9wsYBMIMnk4z6Tka1y00D4eQ15hvey2vebwCpwqtgkwXZ9uE58n5YzL4sQT8ox3Hy+Bub57/lqrQvOY3+XQuQSY1PfMz9JYJG5v8bZr/XTOZV7TW9M6NUY2RvrxxsjGyMbI3WP3Akau0bVtKZSDJqcjg8lRluXibX4KxH8+b5orOYuDABXtDsS52cedYbbP2I9xHk8gqVrp5P14fPbZE7I7hPMxSw6zMavyfmXK4cDtY8t+rlcPtDXg1jH/4cAUcLOgkt/xt11SgvXFQgL0WWKkrKlvskfSp+j0dPfHIVkhTImQi6ZkK/Zz/r3q44sMjw/3aSbHFCf+3f3UfSTpx+Wmf5Bnn4v6uGwhl/SUwCnFm/Oo+ZI/6ZxvwdAx17t/15ieU33rCPW7Jq/GXNNN0zo1RjZGNkY2RjZG7urnXsTINbq218IroH1f9ywRqo8nHZGDSDKyOw95mQUmjZcqez5/Ihlojf80ZuJxWc5/DNABivp1npKuJBPlmx2/U2JVRzLosydKDxRPEuLd+fFFhbf3RQb7zcA4Je2q84eKZ4BGGf17WnAw8CkzkxvjhIGv766jpAdP4Pw8A/YUJ0lXHEv8ccGdYiYlWOp3bcGWZPQxHFyod9ljbduA+72DQIpTjjfTlcbxhO3ju/8keZ0PB+e13LA2HnlIMejHXK/U92WA0nSRGiMbIxsjGyMbIxsjr+Uthf62ossm5Tkpileqnpz9sxuef/1q3IPAjaw+6Va+A5L30Q/j+bzsPzOEB2EKEp5P+vTx0u3RtI2CAZACM1UlZ3yn4y7TLBkkW1OPs0Sj9i4b9SL5OPcMtCh3SjIEAhF1RP9hW/mfg6rL7/ZJYJK2RZAn9id/Dmak5C8uj9sjxV7ile2d79mCaPZXvLMC6gmcxL6+uHWdOCCmNj72DOzTQiJVSynXDGhTPkn5Y01ns4VBymue/5wP96umq1NjZGOkH2+MbIxk+8bIewcj12isJf2r0N7e3qJqRDJwSuQXmAjJk8L4rVL1ITF4PWGqvTsO501J1AORtzeraudtQJ4ckqxXBVm2cz5nxDZ6Tagncr8S55yc96rVglmwsS1v63tyJ98z3XPMZdl9Kw3HnIHBTFeiVG2egYnbn+epP87BX2tPPMwWE76okWz6vQ72TXKRPLk6iOpc8uPL9Ody65a+H08yp2Oz+EgAcJW2a1sFkj1FHndpQTDjjbko9Umf12RJixmfNx1fI8/LLjcXQl6JPjo6+sllWd51dYKmM2qMbIycyaAxGiMbI5PM6Vhj5EVZ3pgw8ujoqE5PT6NzXMtbCqtqZ2LelvMrRlJyLr8SnyUrfaYT+5hjjPgL2xzTk4EHmMbXG3n0b39/v46Pj3eSL8dwY3m1R/18Lo4/0xn5pS607cL3Wcs5XL6ZTqjTyxJUkpfnSWkrhwcJz4lkw3RLnL/w7ec4HrdEsGpZtftA8v7+/s5v1PAvx6BOk/6SXPxMvsQTdcTPApAkG8fS3Dzu/9wffJykZ/bztpJz9iOis/mdUnvnbU2GGflChvajz1wlPpLcCSC8jx/T31nV0e1Ked2XZ/Kv6SX5n/ObLgr4t+n2qTFyd7zGyMbIxsjGyPsJI6/lpRkkVgicGQZOStqz4FMbHverZSZsfaaTOAiJ0i1E8ea/80AeCA4CFY3nyVBtmRT0XcTKGnWVkquOJwNT79SpEpxXqyg79Zn0w0TgydP153ymOZKNfX7x4E7u5AnPdTMLBj4bIGLlU4sk+kNK
dr4tgklA8/giy5OJiL+i7jpzffG8PrtPuw0lGyttzqPGp045hsaUbujjtAF14NUstx/lYgymCmuqjCXbcxxvO8sd8gHnx7+TL1ZVE7BwbOYKt4nPl+I+2Sb5ygxQBITJb1xOH3MNTJquRo2RjZGNkY2RjZH3Jkau0bU8w+UOJ0pJ3Nu50q7S57LxaOiqi7dKU9CncfnjfHJgVgNnVTvKpeOU8+DgYOeXsmd9HJhn5OCdEtXMOb39siw7FQ8fn07qyVbH3CE1psZL8mqs1K9qnqQUBGznPJPvVOV1HRwdHZ0lWwYe++mz29B1rjFUUfWqiPsNeaJNJRdfj5zkIF/OJ/Xi86UElmTlQnCMze/snJ6enm3R8WqPj+8g69tdSA54h4eHdXx8fGGbEvmbAe7h4eGZD83I5U5boWYLgLRIS75GfSYQuyzWZ3Knv5SVMZoA2AGfYzNu1/TXlKkxcnfexsjGSJ5vjGyMvJcwco2u5Q6XJ+W1Sf02MonOqiBMAOXj0CHHOL9Sd8emMmX0VA0ieFAuObEnTAcZ6iElG/KuMd053JF5O/oyo7oczpd0Q12q+kAA9dfIcgz10zyuVwaHbOLEapEDl+ssAZfPqe9OvngQX/Q1T7ZMPAnw3XccFOQT5DktotjXY0NtyIvLKt0SrF339GcHYcq3lmh9geBJVX7DsbhA8IWXxiGfPr/rjVVy8pESvl8kcBzJrkVdks9l98UO2/rixuXiefpv8qk1XjQOY4bnfcF22ULU7e7+wzZXzTlNmRojGyNdr42RjZGNkfcXRl7bBRcnTEqZJXb/7A7px/V5bS4GoR9zw3uFJilShmPlw9sdHx/vyJr4oTOqKpEclHwISGZJOgFkAlwec2fUA93UV6rIeXVN51z/XvHyAKLuOZbP4XOlPdDs58mFuq6qs4qpV8Coc/qWwM6TE3+7wRcBnkw0v+uEfkW+2c8/p+8c3+3gSdDjidsy0twcYyanP/xLAODzG+5/HF/H+T1VGXWcfuIgQt3y/PHx8QXdcEuK7MxFAqu6XCSk8RN4EmioW8lAnlIc8zv5TjbiAigBmgOpPvtCyeX3OdYWHE1zaoxsjGyMbIxsjLz3MXKNru0thS6cBBFTFJTt0lW2K0JCzSptpFTxYyUwGYrzegJkW47DIPOtDwogzS0DpeTJtp7APXmnxU4ai0FN/ZBHb5fmYRKs2k3mKZhmgO8Jmbe/aVcHPi4KVE3UdyXDlBRcnuTjs4Ujk1+yp/tFGssXLLS/J6bEF+fjOJp7tkhaW6BpvlnCTYnFq3scl3HAePWELMBSX/qj2oivg4ODnUUZea+6CDBJRh73ve2esJPufEGWdJsq+NQXfSXpnDrzOPRFYQJV+kOa3wGE+ddtR5l9sZwWRlu99lsKb4MaIxsjGyMbI6m7xsh7FyO3sRaD564vuMYYy+Hh4YWrXE/UfnXOednHA9jHmyV/H6vqIogkB0mOqe++n9UDhQZZ40/y+5xJ5gRUDsKp2qi/3PtOWbktYy2RUpYUvA7uOs/goGyuj1nS8+ByfvSdQZaAhJ9nAepJnrzyoc4ZyM18wduor7YzpHEcUDxevILk+vJ48SSjOS6rnvPVvGsx4bbiYiQ9GzDzd/KW8oTPmwCR4yVAE60tclLCTCCrua+ib5ePY6R4ZjvO68DLuX0eyirZ1kDXY9qByRdQor29vX4t/G1SY2RjZGNkY2RVY6TriX3uFYzcXkQ+vBdcTJTJ6WeM87i3rdoohbcVZ8lpTXFUDuee3X6v2nXaWeLQVbG34WtYE3/qr4coXZ5ZciT5rXzXn8ZVNSQlcLVzkEl2cD9xe5BmycvnZdskt6of1GeaJwWVJ2S1T74iEOE+bwILE8Ks2pmSEuX280nfKXlxfk9eOpYWKklOB9eZDZks3Qa+aEyJem3xwLHd3uTXk/Bawna9pTndLg441Bl5dJ6T/bw6dhVyPXryp2/zYfBUuXVdOVG3fJjaZfcFjI+hc8fHx33BdRvUGNkY2RjZGOlyNUau0yMVI7e+Fie662e43GmSI9FZZg5NB+JYcmRPUO4wrpzZrb+1RKDv+/v7Z7duyVd66NKTvDupJ1A3LNvSsVwP+i49+FuSnKjrNf5cHvJNfrzKpeBKCcyrK86X/l6WiKRz8enJU/0S0PC8v8rVk44n5zQm2yUwUl/tyfagTX08CVK3nFdvb2Ilj9W2FFOpspTiZeYjBFSPO+rYeZ8labVn4p3J659dNgKdVxBJs3H5vIfOeaJ2+ZI+mWCd76Qbt4+PT79OMZQWaVpQ6pjP4znEKdnAgTPps+nq1BjZGNkY2RjZGNkYeW0vzSBjfswZ8UScmKRACQxc6Q5ETBBrvPAzDTgDr1S5cF74nQAgHmaJccYjeRBvbEti9UlATF7XKAWB+ioJ8HdSWGFgok2A7fKxeuEBOnvI1EErjZvs4wHjScp/xJG8pPY+lmzpvyMiIODzC25/D2T6kCeA2cLB+6gt+3t1iecTGItP+lDa9uE6Ej/uo7RhAhy2TWBMXdPfEsAn/xBfjCH3D++fZKHfz5Ir+U68uL1PT0/PfF6vEKZs6pP80ytuCdTYnjkugU/yhbVzTVejxsjGyMbIxkjy0xh5b2LkzAerqvIL82+TZiCiyf0H6lxJsyShSggF9sCkQt2xLuN5Nr8cbe2fg4B4S8f11x2MDjsLbE82JB5X4B8dHZ2NyR+QVPKaAUqal2NrTK+KJjD0JOjyiWTLNd2wD48rMFgFoQxuQ+qAFQ/X+9HR0Vl7VXEJGDN7UBaSb1eh7zOhkVKyZnvplDL5Z/kDwZhvjtI8M39j4jk+Pj5LdkmnapNiaSa7yO0tvSd9uG6SX8ySHWMzLV5SPy5sNHaKK18AesVTfTmPeE45g+RAxtyT8ogvKFxvWhyoDRdSvthI/ZvujBojGyM5TmPkOTVGNkaq772Okddyh0uOR8E9Uc4SZLri1N9U3dN5D2yvvM0ClFfdbMOkRsDzJOyVF/LlQZEqGNTXwcFBPfTQQzu/z8CxfO6UeHic7VKFYVY18qTCSoDG9C0HHtSuNxETHXXFz3RsD5QEQmpLn0t2pIyz19yyv/ucL26cdyZcb5uAxxOt+usvdU19emWG47idaEMCC2VyWZikUszxmLdx23Fs+q/bdpYjfKHmFVG3jdvNda3P3HI1i3/3saSfpPcxdn/XyPn0udbIX4PrFW5f/FBvXOAl/dDn9GxMysvyHS4irwImTXNqjGyMbIxsjGyMvL8x8lrucDkz+szjrgAPgJQwSFKIO57O+XfywL9ugORAPo635/g+h8sxk0v7YTkfr5jpID63O5AnMe1nTuPMdOvnmTCY9LyP8+R9nFefd/bZbU15xJ+Szuw3P9Q/VVI4p9tO/bQ9hO1Y/VPgul6qNknBbZn8gv7BSjf/Uc6kr7WK8SxGkh0o+yyW04IiHfc48wowE/SyLDvVQdfPwcHBBbBw3hU7s4qo5ubYPgZlTv7v4My5U1zSn9ifff2zyAHzsvghz2qfPnMBlfIN23G8tChsun1qjGyM9D6NkY2RjZH3Dkau0V1fcNE
hHDBcEBmFxz0Rsa0ndc7nBmQCUB83CIN/FtguR0qEKTns7e3t3HpMVRC29d/JcB1IHncY6tvH5oOOyfiSf3Zr2HWt7+l1rdQTg5j9UiJLTu5j+puxEph7JYXjUPczOzC5aXzZhAGmtpKPv4FxOwuRGYBxTh3j7faUoNmPbdnOkzj1m3zZ+fctCzo/k5HVK/LHeZO+vPqZkpq2FMwWZrPj1DM/e5VxllCdDwKDxmE+4eKFdqNtE+iKfPHkPkNd6vwMYBKAJHl8AcWxOecsbzRdTo2RG2qMbIxsjGyMvJ8x8tILrjHGu48xnnJJm81gKxWXyxJYUgydkQl05pj+eeZgPO4Gc2NQ2c7vzLHJfwKTlLAIiJf1pxNUXbyy9irWmnycKwHsZWO6g/rYfJ5AFTf6gn933XBxIFk5v1dEnIfZQiAlOspDnn2clBw8IbPv/v7+heTsSSf5mR9nQmXS9qTsScLlXvNbT24kBx7nlbfpPTm5bRg/TNJpsUkeCYhpbPfbFEMz3jS/L1YJdP77PdKB79/nZ/Z3G0lejeMLwWQz9z2X2T9LHvqq+5/GIx8zoGo6p6vg47ZdVTVG+tiNkY2RVY2RyTaNkfceRl76DNeyLD92WRu03fmcHNdBJQWiPtMICUg4Hx+8pLLJj7/q8jJek1wcS/PRWDSGy8mxXFZv5xUG58OdggGwluD53ZOUOzT1PrtV7ElY5GCb2s+SGudw21BGBmqq0iVwcOBy/hLN/I56mumCIOjA6HPws8Z2XYh4bs3XZvbhOLOYSN/dN8iLHxNo0HccgHyuZDMd95hIdve/szhLcqnNmn9yPvJwenr+FiUS+Ve/xFvihcept5k8CazofzNwozxpvLU573e6HXzctt/53Bi5KyfHaoxsjPQ5+LkxsjGSxx8JGPmwPMPFREWBnDzBsF0CELZJSTIZmv2593e2bYJj6rPfFuUc7JvAz+XxOVIFhOeS4SWrzns1LCWXpN/kzEzadN4UBGuB4Trl9gRPFl4l8qrWTN+s/Lls/Ot6c3+aAWgKagcWyqXvtI/rlvL7NqOZb5P35AueWP3cLH4cuL0vbbHGm9vEq7Ts4/rSeY93EcGIuqQ9+M/9NvkX+UhjsY94S+PqMxeRSd/pzkPKBzO/9Sqb93GbzPLBTE8c0+dNVeGmO6fGyMZIp8bIXd02RjZGPpIxco2u5S2FYlQM8NacztE5maBFnhR97JmwyQApsTFhpLkSv57I9Z0ONqMEjJxLfZlQ/Ao7yU++3KFnzql+DO6ZjtbkSAknJWz/uwbay3L+dpyktxmvHvCup1QlSmOlBY3zLkq38pOe9ZeJkH18zqTbJL/s7K/XdWBLx5jY5H/+VirKmxYb5DUtClzXLntK6smv0/wpCft4nMMTbPIR12+KrWQTArf7h1dcva/rxs9Rf+5Liadko1nbWT9W8D1PpJzQdGfUGLlLjZGNkY2RjZHe13Xj56i/RxpGXssdrhQgUjoTuBJwAgD/7NWP1IbjMLmvBfjMWRLAsS8/J74TuKT25M+NRSf1vmmeNX3OkuOMN4LpbCGgfkm3ntxSckh2STrwdq4/VoAY1Jw/BTSrKORLY+j1qs6bgn9Nn2v6mJHr2f85764L8bXms0k3HCf5iSdc10caexYTDg6XjeF8ypZ8DbDnF+pitmCgnhwQZsnfdaHvXqn1ahnbXZbUHRx9Lo3pNp75i+dbl8s/S5bUh1tAEnA3XZ0aIxsjGyPn+phRY+TFMRojH7kYeW0omhKO/vHHzDyx+2fRWnJec6hEM0f29hxT51KFjoanYzIRpVvX6sO2KaHQWT3wkp5cJzye2s3Go8wKFAcvPhDJ40lm119KPN4m8Z4SXkoAngxTQkzBN5vbwT0l+TQ37eigxPHTbe1ZTNDfdC69GSslJRH3T3u1V2OmpKLE4jJ4Wx2jvvxzkt115/7AOGCVyftyDPfHpPc121N29yfqyuV2mTg2eeUWrKSHtfHSIsWribM8cJnv8hj9d23upqtRY2RjZGNkY2Rj5K5MHPtewMg1uusLLk2ajM3AWktm7JNusXpQsa8L60pPylAy9wRBJ2RC4K9Oc440vhsk/fNfzua5lIw4J+cjOHmSS5Wtte0SDhocP+kv8V11EQB1zl/Nmvonn6FNFIDiK/0qudvHbx17wnR/ok45rm9RcBs58Ih86xB1Sr/yBYZIeqP9HZDdZ10PGlsPsftihX5M0HDg9rE1b6qa0R+og+TrrkPJItut6XiW2BPYyIcSuKVFQsotLqfriG19ET2rqjm/tAPP8zv16XHF6rbPzT4O4q4/+sEslzZdTo2RjZGNkY2RjZH3B0au0bXc4RIjclY6gQtBBp05d2SC1MxodF4PYneYdG7mHBqTtwx1Tj/25/0JEskR3GiJL5eFBve/Xhlz+Rw8PMnybwJB34rhOnbedCzZzZOW9/U+s+BiIpa/8W1YPn7yM87t56QT6sdB1f1H/eQT/qavo6OjC/Zz4Heg0vnj4+Od7+zvQOuLHo1Nnt3mnJNbEhxQPJZPT0/r1q1bceGgPvv7+3V4eLjDD5O524E+4GCuY5yffRMoc5HK8/wrvn0RS53RPvTFmzdvXsgdrj+Xze3v8eGvR55t5XA/SDFMoPZ+Dvbej6+q9opp0+1TY2RjJI81RjZGSg+NkfcPRt71SzNmQvBzEsaTUkr+vG3vCYhOoYD3/ing6CA06Fqid8ejAycHcUfSfMkxvK0fT3pLwKIENnM6yeHfKUOaz+Uiv5IrVVSTgzq4iQc6tCdZ/161G2CSiTzwvNvKdUl5We3gAmlmZ6/yyg4eePQ/90fN7UDPpMJqjAMr+fLxaEP3Rbe9gITklfhUWffku7e3d7bP3yuXx8fHO5Vmjp1s5lViP05wlL5OT093Xn/t8rCvJ3TJ5LkoxdXBwUEdHR3t2Jjt/XhaKLA9Fx6UW7qk7Er0XuX0eeg/iafE4yyXp5hpuho1RjZGNkY2Rqp9Y+S9jZFrdNcXXAwcVu3EwNrVpwOPB4AHWNVuQFXt/lo3nWMtqc6Up+9ySoKOjOpjM7BSNc0rac6L+Jk59wy4qnarROkK25NUSggJpDwROs2cl32SD5BntfMKCX2JCc+Tjcaqqp1KquwwA33Xz8nJSR0cHFxYCHgCdfJEx2PHx8dnx09OTnaqi5rT/T8lOepFOmACOTg4qP39/Z0fFHSefBzqheBD4lwpBhOY8zhlOT4+3rE5ZXBZnVd9Z1slU88rAhCXg/z4YsETPOdS+1kSPT4+3skJItepZE9VYI+3FEt8O5nrOOU06tYXCe7baSzqyP3NF4lNV6PGyMbIpN/GyMbIxsh7DyNn+qi6ptfCJydOzuLOx75Vu4qjMdxx2IeO4r9wrb7u5ExYa7zqOx+kJLCwXZItJWId55wpmJ1SkvH2BD8mjDF2Hx71fuQ7JVEHO8pGORwMfXzqSue9oid+Dw4O6vT0/M07HrRJnyL19aRK2dKCQG0EMAxMyeH6JoB5O/HiCXG2eHDAVCJ2/WjBw8UP3x6VfMtt7wtngrAqkO5j7iP6q0rW4eHhhfjl4s
+TuNt8zaZ+LC0YXUfig3r1pJgWCzNf5jGPh9SGtqaeZ/K4HzIWuABJCwWvlnOx4Hz6oj/p0n088dh0dWqMbIxsjGyMbIy89zFyja5lS6Enh+QEIt7q1TkKQ6dm8PMqVIpg1cb7eMIWf8uyXEgW7uS6alUgcK6k5BRoTpSLx6jHGc0CQImPekxjU8d0pAR81MdMljSP28d50lhe+UsLhBS0M9CVf3jCZhLjcS0w3GfpC7IVKxdecXKe5CMaX3rxCkkCcPJDPREUyZP+qWJFPc4WTkwulEE8u27Fn1e+UuIXH9Q1+6ZE53Zn/Ign6legSh36oscXJjMgJS/8y3FmCyoCtvSWKqPkdea/VbsLVc5DfaSqKMdnW5HzlRbA9NMEZmtg2XR1aoxsjKxqjGyMbIy8HzByja5lS6E7WEoiqU/VbjLyvkw+7hypGuDJVpU7BZ+f41zJET34RQSGWUIUJbCSUfkAJpMNKzMEWZe9Klc8Ka9kY7J3QE8JguTJl0lXOlPw387Y7vypD4PYq1tMiBwvBZsDmMbyh3c5fgpiBy7Oy6qXjjFZJxDzWKH/sVInvVTVWTVnTUaOTR/yRZP057pT7Lifu8+77XzRxIVYSs4pEaak5/ZL7ZMdZxVIr3Sm+E7ypVznldDZOOwvu/oihXN5zFG3vmifLdCkf9fN7M6Fj8G2nKfp6tQY2RjZGNkY2Rh5ke5FjFyja7nDxb/87AaqOld+qi6wvx+XIql0oIjNyQAAIABJREFUJzqIB0DV7p5nv73sCc8TtMvgTkqnIGB5AHM+Jh2Ny+BJVQFP2EkPVRUrO+KJY/O4kiH33aZEPEtKLoe3pZ5mAaNjbju9iYigxblImtffXkR7Or9e3XCbM7H6P/EuHVLHlE/VuOQLDHzqXvPrYdoxxs7+d38I1vWfFmPuNx6LWujwTUBu57RI8KTMBZPrfsaL+3NKnnt7ezv78rmVgP08mafFCG3Eth7PfBYi5YvkY7PPCQi5uEm5h7pMgMr2tIM+M49KDp33cXysNFfT1akxsjGyMbIxUv0aI+9tjFy7+LqWZ7gSE7xqrzoP8JQw1I+K1XcfZ+aEnpQ8wTPgFYTsx8+uMB+Hyk1X354YnT/ONwswr2B5X+rSKxF0Rtcvv3v1yMfjeY7JhKfjqdKV5KRtHKSSPVnt4NiemD34Z0CusQlSDtwkJjOXg7zrn8YTEVyoQ98OoLl8QVJ1vhXFq5QOVJo/+Z+3d/LqsAOJ+if/T5Uu54Gyuq8k29PmvqhyX+JxLmLIDyn1d13pe3r9rPPsOYfnKavzSf91HTl/qthyIU79p7sPyb/Iiy80yD/Hc5003R01RjZGrsnZGNkY2Rj5yMXINbq2Cy46lQOFM1y1+2YSEpMDBXZFcB6Cj88tYh9Pojyuvgx2Vv0uU3LiLR13WfTdjeoAzCqb9OUO5kmDvLgdvH3i0/lL8nrF0vXsCYhBmhxfY3HuND/lTIuH1I9A5YuB5D+Shwsb9wWd51z6K+Bj9c154RjJPj62V7W8v4N6shvfNKY2DkIpjrnQSePO/Puqi6Pkg55k+fpcB1/q2nUxS7Scl/MwZrSIcn/SeJoz5b00n9q434r4Gmsuksgrc9/MDiTGlMb3rVQzXdwOsDRdpMbIi/M0RjZGVjVGJlm9b2PkIx8jr+UZLp/MkwiPiXjrT+ROSkdhwDh54NBAnMOdk4HhCWVt/OTEHvwOAD4er7h9m4LrwStLDigOJOzPwEoyqM3MlnRwBy3qjPL7w8G89S7n9yRIchBhhVB8c/uAv6WL4yZA8gSabKOxqCf/HY6U/FMwqqKXqiUkxoz7pffVvARlr3KLB/4mheuae8ddb57cU4WIOk/6pM+4Xznw0lf5cLVs7briZ9enf14DPbedz8WY0TaNGRCleE88UA/p910Sn75gczk9j+l8AjfPGYk3fp6BVtPl1BjZGNkY2RjZGHl/YOQaXTt6KmATEx7Y/MzEyKtWvnklEUGH4zuw+LweKGqXzs9ALJHasnowM3ACWyUIjsEELP14UvKH/jxRryVs8k49OWCxOpEcTu1cLvHjgEPe00OLSQ6nlDTII+3hfLjPUR6CFuehL7u/uc95HCSZ5ONpQcM+WkD4AosAQv/14HefdN1xm4XrXPOxikn5PcbcPrPYpV/Tn8k7fV1zCWzYn/YcY5xtH+Hiwuembjz+yTtl8C1gtJ8DrI+TFnVuGwellCN8DOczxb4fS4usGc1ySNOdUWNkY2RjZGNk0otTY+RF2zySMfJan+FKimRgJIUTOLxi4InRK1Gcl87ggc3+bJ8SF/+mc5QryTtrOzOIO6+3dxk5hssj4sOSnhBYoZjJr8+0mT8wS517NSxVTmkD3+7hcyfbiLzS4cl1pjPnI/EonTooJF7cl1M1RHP7w6SeVFJVzKvdrBrRHuQ7VTEdKD3x+rYVJh3/zH7+mQs69w/qKIEL/VJyut9yPuqQn+nzrLCRH7VX28PDwzMgTQnVfWgNMK7y2ft4FT0Buff3aqjzSDmUX/VbNxyLb4CjfSTbzNZNd06NkfO2jZGNkY2RjZH3MkZeywUXHdO3njBY2Dbd/vWEr79MWjrmSvcgdGMwOOmcbkApODkWg1XjXAYoHpQ8R/nUjv88QaX+rld3Vh5328wogYbLlWRkHw/4xA8TywxU3bmZMBwgZwCVAsEXHImHBGqpbQIkHWcCJfDOtqisAXzV7kJBWwrcnqzg+VYVxlgi8UtQcF7c1rN9224/f3tRSorehp+5mPR5HHxTjFD/6uMLIfeVxIfnt/R3Rq5X2cIXFOlOxFUWgEkOX3zyWAIjX0CwfdOdUWNkY2RjZGNkY+T9jZHXfocrKTcJl5KflOiGIqXkNBM0JSXOn273emXFDe88MZklXtWGPPt3BprGSwmfAUP+vQLg1Q7nidWLBBaeZC9LrP55BkDpdrF4cb1IFl9oOND5a0AZ6L5AEZ8p8B30mfCZwJIuHPxnicb1s5Z400Ip+bD49PNsJ7+hr1I28UT9zpKWx5T7alqAeKxfFdxpf771awZyrt80h+sj+d0auDggeZ5byx+ktPj0vrM4Sjy6b6Y7FYkXX6Snha3aNd09NUZe5FVtGiMbI10/jZG74zdGPrIx8touuNwRGNhkTLd1KbD6e+VN48yAJzk9gYIB7v2dV+dbPK05JZNHAlLniTrguP49JZFZ4iE/5JdB47KnYF9z+FkSE+/SgetIx91hRUzSs+cQvMLjweL8XyUpu4wEqFniW0v8Sac8R534gonycYxUqXFgnAW4y+8Vaso7Gyv5iycWyTFbLHmF3nVJnhwY0/wOYB4fM549Qc+qYAng+TktONyfEy9rwCo98hjnSAtLzuFx7jlP416WJ2ffU15oujNqjGyMdB3peGNk7bRpjGyMvBcx8touuOgUvCWYFJkcwJOjt+d3H4/9FCzce5kczQEhzT/jwR3BX03pQZr6zoDTkyIDODkE2yRnTLpiUJHPJLsDFNu4/WZJluM7L/SVtCDx+ZJPpQBd44tyscKX+ngC0S12Bzn/MceZv1Sd7wl3m+jvWn8Hg
ttJ1Elv6U1oM59fAzOXcXY++WhKWMkX6EupajjzF+dlpj8HxWR/lzXpLsWXz8uxZ0C+BpDOx1rMzwBy7djs/O2AS9MuNUY2Rl4JI8eo4w/9tF2elqrTvW3fRYe2MtSoZTmtCrY+s+HYq9Pl9Kz9Ge+1HW9s+dueWyrbN+HZZoht3xFyd23mORt/nB/TfKfL6c6xpZY63p7TH/HJMWtZtofP+54uF18QQT7LU9io6EPL6WmNsbejizU6+Jkfrr2f/jeNkSZrY+Q5XfsdLk2u777vkkDii1ZRupWrMZzSMQcGOcravtxk5OSclMX5TcnewcMX1l7d9HHd2ejQY5wvmF129RO4p/ETgHrwpaDy8ZclPxfgCcCP8/Z82jubeE4BPQtMTxqzIOV5Xnh59YltXV8k+rx4mv0go4+R9Mx42dvbO3srlI8xq+ZxoecxWFVnY7KiSH6pG09atCNlSQujpEdv59thJJf7oc9FPXA+X9i6HOTF7XFZkk3gkHSQFnKe//wOwGwBl/hNPk7ZlSf0UPBVwJsyeS5oujNqjGyMvBJGVtXJMz51aoc7oZPLmzTdJY3/43Nr/NQPNUaC7keMXKNr25ifFpLucL6P1heBVRdfmcpjM2HoXHJs3n5Pzswkv+YInnCTwdPtztTHF7nUkwcU+3NfvMbgMXci7+vnrjJHugCaLbhnIOtzJjunvi6b2yHJQ1lSAMxk8jYJAAmYLhtfV+s+u5ZIki95hZYxw/6MG1LSJWVRXKRX50pWJRzfwuMLBo+BmWyJ57To4D/5xmwLjtvBdUm+mE+cl7XYph65PYSUchR1zUXmzDYJsDyfiAfqxfOs+8MMMMiL64Pz0yck04zfpqtRY2Rj5O1gZNMjixojGyMvi927vsPFRVtKtElhUgiZ5t0OtmOicwVyPl6l82FXvyqeKYUVQ0/4DhIzWfl5lnBdJy5Xkik5yRj518zp0NIFj7kdfFy1IwCPcfHNRz6Oyyr+OI/sqLcCJZtSbspJW3oQ0/Zqm/a7MyHSlmkRkHTkxDFm+uRbkiiX80253bbUAXXtx9Jvz7AvF1RMsvrRSS76aB/vQxn9GGOGcye7uu64vZHjpP3XabHHcZPd1N77O18+NvWW5nPdSg8zgEvxwq2qqS9lSDFDPaovXx+cqoMpdyaw8zzSdHvUGNkYeVsYebpUHR9Vffqfq3r046s+9ZvqAv3wd1R9z9dUfcBHV73n0y+ef0PQQ79V9Zl/sepNn1L1iV/7O8PDGj30+qrP/KDN5zf/vVV/68uq/tOPV33D83bbfdCzq97lT83H+ZT3r1qWqhsPVH3WP636r6+o+ry/sjn39u9W9Zc/46xpY2Rj5BpO3vUFV1okixFfkM62pohRH4N91xbFnMcX5TQ0xxHPPkaSy/vTAQmCBCIHHq+WufwJ9FyfdAoF38HBwc55OqfOpe0cM0Bj9TP9gJ22myWnmlUUq85/SV7POqXFgweixuDcp6enF5IOgSsFIn2ASY6y++9z8KIjBRn1PQN86kXHNebMDrNkxQTIX5hfluXMzkyE1BcXbrz4Ojg42LnYYnXI+fAFlPOfkq/sIXv7ooQJbZaUXd+u07TATG0ku2Tzi8okq/OlMZkLHCx9bo7p5DHAcf13Vmb8+XjyhVk//mUckOgvau/2aro6NUY2RlKfl2LkwX4dLadVL/qeqic85cIYVbVZ6D/uSVVv+Tb5/BuCTo43PL7FW//O8TCj46OqZ/+pqpf+XNUnfHXV8z+i6vS06j2etuH5vf9C1aMeU/V9X1f1J//ifJy/8yerfurfVH3RD1Z9/PtU/d2nby7cXvqzVR/8CVUv+t6qn/iXVb/4H6sq5+bGyMZI0bU8w8Uqkyu/ave3Rxx8EoOesHWMgnHuqvPnThKocQ62T9vOtCgmL+48Xnl0OWbgJUpX0un2anJyHiOw+I/h8TMX+QQrt5PGoX1E/vsx1JUHdrLzwcHBTgWAwHSZs/J2tV9geRLTXR5/Zor2k5zJBu4LM//zQKc+9FcXRloscE4uDmQvzkP9Ofmig3OuJXXZzuNKF17uC1wAaWwuMMQ3K22MCSYkze8JnHP6hbafd7t4bHobt2eqLCoPUK7ZwjKR+77nFM6dFlqzSl3yO/efdLdDY56cnFyQifp1sJyBu8eF3wFouho1RjZGzmx0ASMLMv3Gq6qe+U5VT327qs/89qr/5wer/re/fX7+GX+36ud/qurHvm9XgV/6b6s+8X03d3lGVS1V9eCbVH35j27Ov+w/be5OVVX9sfev+t1vX/Utn787xqd8Y9XbvvPm89/641Wvfc1mrK/56c3F1l//o5tzr/zlDY+//12qnvP1Vf/uu6q+ZvsM2jOeU/ULL6760e/ZHftLfqjqk59W9du/VTVG1Vf+RNWz/nDVY9+06ot/qOqX/3PVc//Cefs//SFVH/KcujItS9XPvmhzUfp277q5G/cL/6HqWc+v+tqfrnrsk6q++6suH+dnfmTz9x3+6OaC7ed+bPP913+l6tu+oOo1r6x68u+q+vBPr/ED33jhIryqMfJ+w8g1XVwLelJwCisGtKgmQIhJOgV/XV0K8M+z81W7iwG/xar5uI3AE6y2frlSVX3gHA6emtNBcub4M1B0PfKY91c/BorGPTw83BmHdzZ8fHdugrknD7WhLpON1F/tOb/4VkUv2ZcB7/7k+uQx+piDLeehTalPgfDh4eHOWORBixZPfGOc/1aMEhXJFyX+2f1X53VRpRdbHB4e7iTCqs2zVynQ6eunp6d1dHRUVXUhqe/v79fBwcHZXLQxfcP9n4sMyux5IFXmNSbtzEVIqgaTh2RPteE5T5bus4xZ6tztRn79u8ej5ybOxb5r7Umek7jgJY+yN+Xw3OA8uU45ZmrfdPvUGNkY6Ta6DCOrarOl8OO+tOpHvrPqeR9c9Qfevepz/s+qP/YBVS958eaC7IM/fnOh8pIXV33M51e99lVVf+UPVj3n6zZ3Yl7yM1Wf9e2bi4WPfreqV7y06tnvs9ki9+f/ZtULv3RzMfLO770Z42nPrHrcEzcXRL/0/1Z93HtVvfhHqp77gs0do2f8vqr9g6pPf8GGxye8+Yanj/rcqp/8V1Wf98zNRc4f+hNVX/o3q97ibaoe/6TN2Pr3V9+56pP+cdXLfm7zfVk2f1/6s1W/9rLNnaWXvLjq8U/ejP30j9rM9Q2fVfW/vnXVv//BCzrfoYPDqn/8M1W/8erNxaLoTR5b9TbvVPUT/6LqhV9S9eHPrXqvP78+ltOT3qrqG36u6qOev5Hzmc+retJbXci3jZG73+8XjFyjaytXMmFVnVfefUHNz0xuPO6A4U7J4yJPXE50Uu/v89Px04WC+qTkTl2IH78w8KC5zLlJHjBVdXYB4ORgSYDwcwpo8Uu+vT3BNS0evCLhQcRAkDy6ANPYnkykM1UlFDTUhQM45VKA8cLLE5l49Isp58ft7UmP7VndYozogkcyu8/TT5w32YmLH+mPcvGv+5jG5HNf9Dmfm7J7xdovzOQDbOe6Skk9Hee47keuUz/vsSX7eH/PR/Qhty0p
xSLnmeUN12fSAY+57KndzB5smwoA1G0al3MnfTZdnRojGyOvhJEUaW9/cwfl+FbVq3616oE32Wzhe+wTz9s8/s2qbj64+fykt6raO9hctLzZUzd3j6o2zzAtS9UrXra5O/WqX6m6cXOzZfH1v7k59ujHb8d78ma8V7+86uhW1a/9UtVyWvWU37M5/6u/uBn3zbffDw42PD3xLare6b2qPuIzq/71t1R9z9duLnhuPlh181FVf/+7qp769lVf8X9X/fbrqp7y1Av2qKqqJ75V1ad+8+bzi3+46mPfverbvnDz/YOeXfWPfqzqf/hjua9ojKrf+weqXvjyqr/3zy+ef91vVP3mazYXqg8+en0sp4PDqlu/XfUFz9pciH7kO1Z99afsxGpj5Pn4jZHndO2/w8WFnC8OSek3QJJyfAGoz2xHRbmB3Jlp/DVHYeDwfLp16IbyIPIFte/vJj8z53TyRXQKMtmC+9RpB8qhisDR0dEOSMp2HnTen7xwC53O37hxY6eK686u89QrfSPZl+fdN5hkZreZvdJK25EP8pBs73fP9Ka/qt07SfzHZxLctyiDkk9VnVVkdWfQ/c+rWqzsyvZqc3x8vPMsWEo0HJdbDj1+bty4sZPoOIZXvVNVjP7A78wfmi9tBU18yZ+pD7e1b//wqh/jlluOkv+lZ/PUhnq4bOGcdOPysQ+rp6kffSzN6T7nOlB7r1I23T41RjZGUr4ZRp4+EkPsF15c9Te2F0EnR1Xv95FVj3ps1Qv+wXmbxzxhc2fssU+sGiv229/fXLh9z3+r+rHv39zZ+9bP31wYPePvbp69uoyOb1V9wJM2d+q+0O6G/eC3Vn3lJ2+2KH7Ax9y+rK94WdXnfnjVc7+16tu/sOpPfFDVz/9ULW/y6MbIxshY2Dkba3rmNkjKpJLpRFz8+ULXr+ZdaCqHbZmoWbFzUHAw0Tg6547pIEDlLcvFZ1jcOXy7gf7yH43DB/icV5+DPJM3r6akq/xULeB8rADSOTmXFt8JVNyGaQyNrd+REtF+HpAuD53ak0NVnW2L84B3XfLCx4PLFxwir15IJw60XlmV39AfPDDVhomMbalH/eVF1snJSfx9LsmopHN0dBSrt4kH15Xzpba3bt2K+9Y9VkmS33VA/9Fcrhf6Em0280vazWM/LfycL47l/2Z64hz+cC/9i7x6vnNbekwty/kdUrZPVTa/GOdf8jrjI1Vim65OjZGNkVfFyGVlwVb/7jur/syNqq/4xM33L/vbVf/6W8/Pf9Qf2tzd+qe/srkbthm86mmP2VywfNsvbbb4/e8/vnk+6TP+l6o/86FVH43ntz73L1f9+PdXffG/qXrrd6j6pv+8uUj6gDfd3An7/tdv2j34mM14L39J1bPeper3vdPmGa5bv131nV+5uUi6jL7vdVVLVT1te5fpNa/cyPe+D1Z94JOqPucZm+NPf1bVX/qkqi9/9ub8i753fdz9w6pv+cXN3bgPe7uqt37Hqi/615tzJ0dVRw9VHd6sOrxx3ufv/MnN2L/6i+fHvvc3N3ccn/bojT6//b9Uvdnvrvqyf1f1wKM2L8z4gr9WVbt3iBsj71+MXKPhk94u7e3tLXp+ZG9vb5psxVhamBJEBAwuFMfwcTWvK4CUXljgbX0uOp74Iwhqbg8Y/6624iEBX7p4mI23LMuFasLMKf2Cgo4v/pUYtChgsLqePcj84oT8C4B8kcHAZYVv5szLsnmjnp5j8kAmX15dTW15oeUXe6yQJP0mO6taeXh4eOEhTK+eJr7TXMme9PMbN26cPY9FOysW6Wdc0Om7+NEcrHT5Vk0lLfqB5FSi5++RccHkPkRiHHiskQ/GnPPu/uxtkt1SwvYFIPXkfj/zB9HMV/Q35USX0/tzAe7Ax2NruYAyuTzkMR2jTEdHRz+5LMu7XhC8KVJjZGPk7WDkyVJ19H2/tblQ2hzMn88mqKrn/M+bi6Sv+emqt37HTbuqzQXE6UnVDxxtjmlRuSwYc/vfP35u1Td99uZlGe/zIZu7UBqHMo1xfpzjTPkbtbmq2p4b+DvG7tgzGrXhZzndDgUeZuS8uew+hviYHecYPv6oOnjB8+vGN3/Otllj5P2KkdvCTDTotW0pZIVHzHBB50IlBVxmJG/LeZKzaO5kCAJburJVMGhB7TL4lgbnW+05d5rP5XIi4OpfuihQW1/Aq+LmthARSJgklBw84D1QEw/Op8/p+p4FpOY7PDzcufPE56Jc97QrK46sBM0CTDyPMc5eUOFz+gWcAHhvb+/sooTfdceNOqadXHavROsCU9sUJeNDDz101scTjD/bpTb+Q6ca0+1O/XEOr5oxFhgrrKalv253T6Yif7uXX0yzb4qtZC9997cuug9TJ7JVujj2vsmmbOPxk6pia77tdvHtrB4DfswXg2netOB20G66fWqMbIzUfGsYubn+sUX/7HNV1ed82OZlFfsHVX/tD1d9889vnrd6/ydsJ9yvevrjqr7vN3fH4Dgv/JKqFzx/M8Y/eObmLX7v/n5U8gW9x3ESf+dSnZ9jm9nYcb69qqumoMTb2vEZH1eVfW+/lr2DqlF1OkbV2Ob2cVx7Y9Te1l+Oj49r7KM4vLcUvXrU2F6bLjW2dVC1c7zY29/eadqOv7csmzukY1TtbS8Eauuby1JjOW2MfJgxco3u+g7XGGM5PDw8Yzqc30n2M7DwhfDawioBEft4P5Icyd9SwvNOHD/d5uR8lyk9GUbj0tFZBfXKo+aRIxEMpG9WDNkmJXo6tTuqHFCLc/5uE0FS310/qbqpMQiKiSSjXpfLu1sE9VkVQ3+TzxEcKTftwAVI8q0E9K6TlCyUxKgb+isvmtL2S/qGLwyks52kHCpeTGYug+uA/jR7zk5jzBKp2yHpi+2r6uyOZprL284WiilnUK4ZeVJ2v/ZFLWOXoJNyhuZOdxX1L1XwWGigv6ZcRzuqD3NAyrWuY7+TovO3bt3qO1y3QY2RjZG3hZF7e/X678bFUVPTNdHBV31S7f+zL22MrIcPI7dbGB+eO1wuPI9LaF+4UWne3hepHNf7pkWa33ZVOxqUlawZuPlCjsk8LRQT6FWdG0QVAb9oSJU87sf1Rbk7KOU9OjqqGzdu1MHBQd26desCT6J094Myux0FbjzvD3gncKWu0yLBb0NT39y3rwqjPu/t7V24+yE+6Wv865+p09kWEM3LbXokXySlhOX+oXjwB0jZxoOYCwza339HRhembu9094q64rNfvKDjAkl86OFf34YjmdPD4/RX+sasCi2biD//nbCZ3i47n+y1Rs4X77AxhqcLJ+jF8xp9Ly14XWfJj8kD27iuuRVqTW4WIBS7Kcc23R41RjZGsu+lGKm3ZixL1WtffWG+s3ntVs/ufZI6+/mtq9BV2nobfRcX/Lzb6qpkI6wNvJz9V1XjguzO77hwxhu5ZBt9jtBbmj4/t1zQwxsVLbX5CYAH36SW092ihKgx8g2Dkde2pVAkYVOVi7fueWtf/SSQHvL3BagvaF1AGlf/Dg4O4laomWPQOWZ3DDgXQdLPc4xl2X0wkPLy4kH
HEn/sJx3RgV2n0h2f9aFTUQ53VjoV+yYg1zzkL/Gs77qY8gUAx0gAd3p6erbg98AiOLqd04Udx9SFgj4zkGc/ckm51I7+R/3xmMbz5wv82QVd9GgBQt50XMf8rYiMGz3ozaRSVWdbJqnvBP6ygVd7nd9ZH/qXvvOCzv2EvqW/3NLgvuHH/M7drB9tQ14oC2VIdwTF22VvXyKvJG05TdtcpSe/KE15jzwzDtbaej/ZlPqRPunLfdF199QYeX6eYzRGbnWyt+Xt9a+tB//S776Q9yWH46jePMvfaeQFfMpRHM8v9t1WfmfCL3STr3Icx8N0jHn7djGS5+TXaUs+5XOM5Dl/dMHjhrL5hQr1RL917PS1CfGG38kDY4U2SHyd8fuBH1snf+NLdo41Rr7hMfJaLrg4uSdRMuuM+UJJ3/XyAU/Svtjz+R0QWK3nlSiTjfgT8RkU3mZMbUV0sJSsXE7XiTu/VwHc8T3ZiUfqiHO6bjyx+jk6jS+O02LXkwLlFqXqLgM/BdIY42x+PcfFJOXJW8DMwEjVkZQ4HJQI8rOk4/ojMGvhVLUBDgE/jwk03VYM6mXZbFERmJ6entatW7fO9OZ80xas5mhebm1yHbgf8a1c9FfqNPk7+3sl3oHW558lbbaZgUoCRI8VzwEpd5FH8cFq+VWTtN9J0DGNq22yKbacJz9HWWfFBC5S5UfpItt1SaI/Jzs3XY0aIxsjr4yRp7t2a4xsjOT8d4KRPm9j5IYeDoxcw8mH5Q6XBPTJkwN5whHzM5pdzTKpucKrLiqJQeeGEv+s6Pj52RU7P7vRE8ixHSsBKegcBD0gWPnhOExUbOfnNLYSORMpkxb50zisOLpePQG7A3vydZ3zWR5PvOSBY7h8budkd7ajbj2QUuALBMbYbIF74IEHzsbTGxbVxsFFMno1zWOBOhegHB0dnb3qXYmeiwVPpAIwycRKmgOjqoP6y7loD/pESjzUUYpbkceE54rkV37nbWZ7XzBRxx4nbO+xO+vjC0+ku69/AAAgAElEQVQCottuVr30efWd47A/ZfDvugviVWj1T4tjl9X58vZNd0aNkY2Rl2GkU2NkY2TVnWPkSfCpxsiHByPX6K4vuDQJEwCFcod1B/Ereo2VBEsOlZTtvGluApzzq/Z0VBotAVwCKJ5zuQmWDgisSnmfBI6cw/W+t7d3tqdcSUbj+auBUxCx4pKSgAdzqrA6WHu7VN1Nr+f1Sp2/Wc/14qCncSgL5+RcmkO6c9B1P5Oe9fyUwOHmzZtnn9VGcghYpGMtWPTd3/aVZBtj1IMPPnjBf1ipJu9rFVh/OJ52pEzS59HR0Vk18fT0dOe7qkN88DTpmuQ+Qh/yRVnydX52u1M3M3/wsXjMFzWMI9nf50u5yGVPoOWLZJHHIiktNGa+yr5cOLhO0iKDczkPTZdTY+Q5NUZeHSNJjZGNkXeLkZsDF5+zkj4aI68HI9foWl6aQSY92biyXeFuGJJXsjhf6ucKYPB4EtZfnqMzM+kzuDknZVlzVg+Kmf7W9Mj2qfLIfrxNzXEcuBN4UibKxmoQdZbAiHrSX38OZ2ZT6kjJjmBC2yR9E6hdBudV5LeVPWkxoAQOAoX9/f26ceNGjbHZ5sOqHKt2XqWj/8yASvyICBI8xgUEZaCNCES8RS/Q0GduMyGp/c2bN+vGjRtnutQLN05OTs4qe69//et3tnfQZm5zki+aqCf3e9pU+koAxrESqM2+r+UMt5PnOB5PY3lecFl8oeztkm9W7VblvJ0vCF1HpKsAR9PVqTGyMfJ2MdKpMbIx0u1/Oxg586nGyDcsRl7blsLZ1SeZdePLQej4VbtVCncqUTKIxvKKD9tpvLXFG+d1ct7JjxuPxAqNJ2IFGpO+J9jZ55nzpnGczxkQUvfk2ykFp7fjGA4Ivh1lBpQEMY2VXlnMIHAfcP9Mi560cCB/AgIByMHBQR0eHp6BBrch6DN/9Ljq4gtDxKsvVFISlEwaj3HhIEs7eNWaPPDhe26zSbbwxQ3B9caNG2eg+qhHPeqsmnfr1q2zSp8Ahtsi6CcpefP7LHZmY/D82lshExjxvM/nVbyrgJQoLW6cD8+JyQbk0V8okPqs6c1lvWq+bbo9aoxsjExyX8DI/d0XRzRGNkaKl7vFyFHzLcuNkdeDkWt0bRdcXEin25oUSMbwqo6USKOlwBexLYmK9IpUApdZotQcHNcTkDtUOn+ZIZJTs9+sclZ1vh+WuksA6/pzZ3cAXwswH29WlaGOuWXAtyGwrQOAfIRjqWIkfSgZeuUu+WICX77tiCS/EWAIIPb3988qdTzHyh33mxPs9AaplJgT0Yfox6mKtyzL2Rt93DYzvUjPBBUHV/VJMe4VQ4138+bNeuCBB862U6i6d3R0VLdu3dqp7DkYyq6eBFMCpg59EeJ5gMdSrPpCjfP7fLSJt+M4s9y19p12TgDrfPO32hwAGFcph8xA0PPBZcDbdDk1RjZGcnzOQYw8wbCNkY2R14qR21fdN0bu8in/uA6MXKNrueBaS8ZM4p6oPBHPFOFjUkBXjPfzpK7vXtlKVSpPPAywFPwOADw+q5SpbXLaJHdKBGOMnVebul45TkoIHJ8An+zHBMO/qdLp8q2Bb+rH755UOS+rWklf4iEtPEge/Pv7+3Xz5s2zv16p01+9HYpJ0HWl6hG3JiSf8O+6QJ0lyJk89N3L9LqW1GaLLq8ySkb54eHh4dnxmzdv1snJyQ7oHh8fX3iQWePSngS2mbyMi5kcPMY48TZpEeV5YvYGuBnN8tjsePo+m2+2YE19Z/pIx9h/zc+arkaNkeftGyMv8r2LkVlfqR+/N0Y2Rl4lV/N8Y+Ru34cbI6/ttfBiwhWUvsugnnDpnDQujTcDHPZPwTfjh2P5ea+yeNB7X8rnPCbZOGcCSB+LgOcJicmLOmDVIt0GVXsFsctAnlzPlHEGximhs58DgficAT2BTrfnvdJAvc8AJOlvjHFWkavavB72wQcfPKvg6Z+2SrCyRz1ybsqnql7yB7cJFweevJTQPDFSDk+WKSHSJgQ+6WT22xfO/97e3tnDwBybthMPBJPDw8OzbRUClhRvlF8+QJD0O6aXJXe15V9RinPGEXVAvXJ+n2OWA1IuY6zwOOea5TAfK/F1O2OR56vqtSlTY2Rj5FUxcm9/f6dfY2Rj5N1i5Dlzu363Ro2Rd4aRa3RtWwrlNK6wFOSeqHScfZ14m5iJ0RO+b8Fwg3F8T67kOSVtn1Nz6ZzfXqbT+IOW1FWqiFEeOrLf9qUMnPPk5OQsKaZtFNTrzGYCfCVLfXZ+Lwsg8SEdcPuD610ye5WLiUsy7e/v19HR0Y5+OR7H9YTs8vIBXwGKgMPBhFU832Kg79Kt/IN7iGeJTvzNEgn9IP2mDG3KcTQ3v+s87Z/8whcw7n+UVe24KOH2DbVnlZM6Pzo6qtPT0wu/L0M5KR/H9O8JJPzzDBBmCTct/pwcPD2e1uImLUI5L4+tLZTczzyPpIUh4+EyP226M2qMbIwkX1OMHPm5uMbIxsg7xcizNjUv/DRGPvwYea2/wyUnmN
1OdONU5QdlPTDo1OpPgdme7ZRwWN2RQdN8XoHysZksCCqemJIc6u8A5m0INvruCdrbOyB5wLqTSg+udwK1SD86KNlVqeF4/Os8+DnakiBFmdlGPOp3LjTeyclJ3bp1awfImbwdOLgHXef1WcBw48aNnYd9b968eXaLX6+x5VuVpJuq8wTtCyTagrZPt6CZmBOgCMxpe7c79Uy/ZiKWnsTTrVu3dt4MpTc0cX4CO6t6GttlYvXYEyr14tVT2U/VQC2MpJOUgFNSZ6xRJ74AJH+0wwwAOL4nY0/2GivZmvaajc04neWMJPvMr/h5toBMvLuMTXdOjZGNkZdh5OlyftGgbWeNkbu6bIy8PYx0/elcY+TF73eLkWu0esE1xniPqnrJsiyvuKRdTLxOzpgb3BVPYdOYPpaTnIVJKhmbx2f8e9XCgz0p2xMBA43Jx3n2ZDDjiXwQBKs2IMDAdfnZl4DB+dMFEYNrxht1qjFUxeGtbo6rW+jiIemSSVby6fWr+qy3/TB5qL/G1F5pgYP2mOstQgIPVe4IKuJD9lsLMso6WzzMwJiy6jMfJvaErr4eDz6+qnhMJgSolLQIFvxHHyYAqS1lIdgQ2Jks3e6q5Gkc34aR9Oix6Hr0pD0jgoXnKR+H7al7LWTT4oF+kfIm7ZCqcbM5Pa/R7ul8IvcrzpvennU/U2PkLu883hi5S1fFSOa2xsjGyOvAyDPfWxojH26MXNPZ6gXXsiwvWjuPdjtGI8OerF3Zs3M+DhVHR3HHSoEkQ84CzJO3VwBdVlbM5OyetGbA5T9c6M7KsRSw7hju3CkBsPoxAyV3EiYAJa61Koy+swqYbMTXnKqtxhYAVJ2DmsZiNXiMcQYgriPJJLBK+5fFl8bSQ757e3t148aNeuCBB+rg4GDnsyp1euBXAETfk/4kp87pOPVH4mtnkz9wDq/SVe3Gjj7TF2lT8uI+oPkfeuihs2TNrSds61VFgjYBSW9zEj9aSLA/F+60O2NVf/mgsfuYX8Crvcc+5fB4SsmTunZfEjmfM53RpjzvzyrQHs6z5vNj/O650+NS/Lj8nDeBxUyGpsZIfXZZGyPvAiPxWvjGyMZIt/udYKS0crqcViFm5B+NkW8YjLzWLYVkzBlIxvHkyc9Vu0b14E1tHCQUhB4E+qu+npiTI9IocgS/5Ur52cfnIm8Mek8Ya07s8mtsJTVWQtYSvwLv+Pg4JhONq0QuHn1LTAI3T7DLstmrrm0Oquwx+aovgUxyPfTQQzuAoGTCoDw+Pj5LTn6RN8Y4q87p7Uk3btyoBx988GzP+Y0bN872oN+8eXNHZwQSjus+xy064o3APgMM2pRJlQsYJVYnrw75AoJ2TQDDyuDR0dGFhQh9X33VTm1YnZXN5AOerGVD394i33rwwQfr1q1bF/hVG5G23NCXZxXm2RiMUfcb2ojtNZbrhDFPvlP+Sjx6vvHj5Nf7M4/yeYhUbSSvzgurqaR0rOnOqDGyMXKOkTfqdds+ym+NkY2Rd4ORx2PUaVXt7+3XwVaOxsiHByOTX4mu7S2FnrQTE1T0DGicvG3VbnVNbbwPqzo+P4+pPZM9kyDHk3weYMmwLteaMRmQPOeVQY3j35P8+sukQL4V0L5/n3vQqYfZPBzX+fO/h4eHdXR0dNbOq0nuP+KPYMDP5EEXcqq6qSJIYBNQ7O3tnQHIAw88cLYvnSAivfGHGj0puA/RLvIjbSMQqAhsqFfqk4swJh9/sJZyafxUxdbxVNHc2zvfpiJdayuJ+3VKIrJXAgRuc0j+JL64MOCiRfF2cHBQR0dHZ8e5eKCc7pMeJ+JP86dFoo5Jp7PKH4l65wIt5cEE0PqbYnqNh8sWqtQJt9b44lVjc6HhY3HbR9OdUWNkY2TiL2Iknv9qjGyMvA6MPNNJZcwQD42RDy9GXssFFwNATEmAlMzpKLpCZyJhRWxmRCpIbVxo9fFKiN+m5Lxqn+Zk0tUc6RZr6jcDPzqMgyb15Qnb5fPqAC9o6LTJIfxhWX/g18HZg1RjcGwlJ/U5Ojq6oDcfi3pwgK3a/FaF/IU860cD1Ye+oYR548aNs33m2oeuPejcl672SqhMsvIX+af0xcTLc1Wb1/IqOYpfJkTy7VtcaEdP2JqH/u5VGtnNQZug7PvKE6iLB4LcWkV4jHEGAF695Zj8S73IF/whZfkVeUmxkRK2+qc40nz0XS5YCG6kFK8+VuLN56YcvuhLRL1zPsaPLx5muk/PjfjieLYYabo6NUY2Rl4ZIzGvMMH12xjZGNkY+cjDyGv9HS4JQXLlMPB0t4NXzd7HydvKEL7Xne0dqDgO50vGc8MpWXFMzqFxFchMvu4kvIL2hyTJK48zUDQWK3DkWXMkINWYBB1Vcvgr9QpkypWCUH/1I4gnJydnD+dW1dmDuxpDiZtys9JUdb5vmZVTBbrulp2cnJw9uCveuajZ39/f2XvOH2nUVgnxot8s4dxcqNDGBDvZybdG6Dv9ixUvT1rSmydezV11EbRJBPzkiwJZgpkvFCiTdCkefRuBJxlfVPqC0B+QTn7Eaie/J/BM41Be+jnlpE8xnmlvH1f8+NgpLryK5rnM49NlSbnPF9i+UHS9emXOn6Ok3imT88W9/t636erUGNkYeXWMPOf98PCwMbIxcof3u8LIpXZkaoy8foxco2u7w8Xqho5ROVK2J1SeS4lSbXwsnqciRepDB3JQm13dktKtQipY5/w2rPOS5qCOKINX+pgcEs9MTNo24HqjbhmsStDan1x1XnGiAztYkV/plr8/4lUj6UcAIlkeeuihszln82hMBgMrPQqcMcZO5VFbH1i103dV6x544IHa29vb2Rah30Shvv9/9t6lR5LkOts87nG/ZURmZXdXsSlSoogP2gjiguvB7PUL9C8G80Pmh4y2Wmg9m5GWI0H4BAgCRVHdXd1VeY27x8VnEfmceP20R1ZmVRaJTzQDEhnh4W5udm6v+WvHzBUwNHDEgF2nt2jHtFX1gEy4tz6Mal15ftgRilz9GPSi3REs6+QTB3c6+FHgoz1Rd9iO1olNqVzo36nBHrbDix6zLKu8O0aZPAagMZjG/sc4ooBSF4O0Tm1brDfGHJWjxpqotzjwqLtPBAGtQ9Nr1N7rdBivV+BUm4gxO8YH6mOQpYPDVJ5XEkYmjHwyRnY69CxhZMLIhJH2vxZGPlZebIZLHS86WF1g1SdcPTeeV2d8MQBHkMBB42/arnhNPE+dgOM6/V8HZKecOoLkqXvVteNUUePWa3A2M6ts/6rOxOLXzWZTCWaa021mHtjoO9dTp6ZEaLtUPxoIcAp1Dt0uXtupzqkBnTZst1s3ctpOwAOAYOpg6/r9vvV6PWfsyFfPsswZQGW1YD2i7tSuCfpqlzHnX1mXaEfIpQ6o1Ue0Lk0zoZ3K0iqDrv7HvZke1yCjdlM3MIr9QHcRUBQwVX91cSCCV91grw7UtG8xiNYNdPT3GDi1nIoVMUBzb7VJ/ayDO+qru2dsT53t151fFxN1sFk3MI7xKIJWPFftS+NCnJ1J5WklYWTCSG3XhzDy4UxfL5QwM
mHkJ2Ek+snqyYCEkS+HkXV1U170gUuFVjc1x7lm9Qar50Tj0rrUsfT+Kux4bTxHj+l5UcAoJ6YkxDZovWZWa/x6LxwwBqpTgBIBgfNVJtpXsyrTpPdR5zKzygsTtc/oSHVFnQTeyALo1D5BHXvQoMAWqQT1oih+1FdNUVCmE5DT9qnRdzoda7Vafn2v17N+v2/9ft86nY51u11PoWARMICiOuHefCcA056oF5V1nf2pbdTpCj3EoK160yCg91TgiQMcBToNznHAhG0ha02XqGO91I7wDwVkBiz8HtuiclWfUMDVQUZkj7QdsU49Tt/0f6wjXlMnJ60/zk7UAVAM7Kf0GW2Mcgo4om4ViJBlnQ1G2cf2ah+jHdXJLpWnl4SRCSOfipG7XXUgmTAyYeRLYKSZWbmvxzv6pv9jHfGahJHPx8gX3RYewdSxoHUBLXZWGSI9JwZmFQaGFwWldT729B1BiGPartgmSp1w9ZrYX47jGHWGFsFXr8F59bzYbhyXBa2AQl0OOywOMoxORNE88izLrNvt+vUAhtlxy1P6BrumgGJ2ZH1UD6RrKLuU57kzjBo44kBFA46+gJF0iX6/b91u1/93u93KTkv6ssZOp1OxDb0fhfbpfdGNsij0Txddqo3WBe0YFKKdxQFIXZCNQUxtTtuljBYsZGTa1Df0/Lo8efqw3W6t0+n8aLGxyqnONxgUcH/0gE1oPIifI6tH25XVfMy/o6wi28j5detgYtyJ/Yzsahz41Q0oI6OppS4man+i/8Y+RH1RZ0yF0TSbeG0qH1cSRiaM/BBGRpkkjEwY+RIYaWaW5T8mQRJGVsvnxMhPfuCKN4hGEqeSzawSgDQoqXHUCZsAcwoYtA3R2FAo56tS6wCA43WKVeOnP9GgIjhof+K91Di0/VHOMchp2/R8DTD0VQ1FnRPQ0XrViLiG93JEQCN45PkhVYEAEgOe5pPrAJm+60BC26m6J+2BNBCOmZkDA6kgnU7Her2e56R3u13/T7pEo9Hw7W5VDnERJPeiYEtqq/zpsQg2cfATbTMGPtqgKTs6kCIAxEFWtBMNajGYK9BooNT+1gXOeA6FHHNtJ23kmlMDIwWPOLDQwVxM3YmDjRic1Z8i0MZ+xd/1e51fkvKjRZkvlZ/GnVNxQevXeFcXP/Q8jU1Zlnmb4kCoThZxYKDAXQfCqTyvJIxMGPksjMyqG4EkjEwY+akYmee5KRIkjPzDYOQnP3DFjsYbx6BCUaOKxhSv0XrV4fTcqIRTDsmT6SnB1PUlDuJgYyKo1BlxlFXdZ2QQnT+2J16rDEMETwVUfUt5PB5nn2IbCeplWVpRFN4+8r75rkEtAj4Ao/cgRSH2nWChU+4apAES9LjfHxcsAgoKfu12uwIkyu7pCx5VPtp/DXr8VwdVO1QAj31iVg85cB9lWgBdPmtdeg5/yEP9QXWiYK3BGFnRbvVFtUe1MfWHyOqojuNgkHoAqxgctd8MONQH4kCQ3yLwnAq2sZ1RDqpr9Y1Yr+obv4iBOgK9tlfbpHGwDqDi4KMuxsUBZV3spJ0aE3SwEeurk0es90OAksqPS8LIhJHPwsjsQW9WfSlwwsiEkR+LkcfVZwkj/5AY+WIphdp5sx8/9aqyYoPjtVqnsjkRuPT6yCxRVJh8N6susqQ+VU68LrY59jveUx2K6+rOj8ZSZzhcX2d00VFUHrpdaAwOFKbq6XdsM9cqK6cAk2WHvG7qwsF0NosAr30muNCmOgOv03XUG+1kITDgwZ/mo7MAWF/iqECr9akeNCDqb3VBsSzLCnulQKhBUfsY+xl1oXKPQBbrjIwY1+pLp/W4BhwK4Kqgp/KBVdO6Go2GbTYbt5eYVx4HS9Gfox9GcKtrdwQW1U1kmOOAKfqo6rkuzuj9qV/jTvRbBfJoJ1Gvdf3UojZfFyNjKcvTm1tEZjfqV+UaGfW6WJfK00vCyOo9E0aewEiYfas+yCSMTBip8ngORpbS9oSRnxcjHysv+sClzoByTjFlOgDXTuh3rSsqQO9r9mMGjWtxCAWbOkZcr/1Q+dDWj48JXRWIUz12XZ3D1TGYmnKiIEy9ek2Uqd5H/7Isq+TRx0AedaEpFRpIMEymcKlT86XR336/9wDPd7NqcFTZESi73a6nUwAomo/OVreACgwewS/eLwY32sBxGEZsSXWpwVaBUN+foWlE1B2ZV+pSue9rAqfKkOvr9B8HFNqOGLw3m02ljmh7dcFfWVDOiywidWk/aLOuqdA2K1MX9RR9Q2VTp8MYVyJTpnVzfhy4qgzjvevqODUAVp/V8yLo1V2rJYJClHUscRZF/V3PqYsPcTCUytNLwsiqLB777Y8aI6W/CSMTRqqMPhYj67wtYeTnwcjHYtuLvYdLZyxUOHUDobqgFQOkCo/vlFivOmQ89zGDitfXtb1OwBxXR4wlGmhsR+wXdUYwPCVD/a+BQhd1KnOBc34IPDWYb7dbf/eDyhZA1r6fAhllDmmfnqOMFAFF8301tzkCiqY96EsayUEfDod+XFk9WDvuQ/11LJjaKSkPkVnTzwp+cVClwZx+q35i8FcGlN9jW9XOog1pXTqg0mP7/d4XY9MeZXSpo64d9AXw0bZhe7rIty4YRp9H3+SlRz/Rdj4WYGPsiAyl9ieWU21UuZ4CzSiDeL1eF2cStN4oF9Vzna7VBnX74RhPYh9OxT69XgGvLmak8uGSMDJh5HMxUn9LGJkwkvo/BiPzLLO9mWX2Ywzj2oSRnx8jX2yGSwNt3U1PKY3/dQBRJ4x4vgLJKYD40P3rjtUxa6cA6RRgqVyoMxq4GldsawwMsT/xCVuL1h37F9uo9zQz6/f7VhSFb1eKI8J26W5CWmfcvED7EGcklNHRa5WBjAENsKFNpD6wm1K73bbBYODvERkMBg4kAA7XkDJBG+qClAZ7Dci6AYhZdecg5AObRWCOtvShAYK2S0FJjyPHOhtQ/4k2GAO7LrrlexyEaD1al4IL/3UNgfpBtIs624vBjv+6iF3tKvb/Qz6qdcdgWqeHCPKR7Ytpsafacyq2RaYwxrb4Wa+NvmZWXYejJcaz2I5T8TH+/hiYpPJ4SRiZMPIpGHncubu6TiZhZMLIePypGLnnmqxaX8LIY3kpjHysvNh7uPRJlIY8Jkg+x2OUuinWuvvGe0VAgwWpA5dTiqkzpDpwjM4WFa/lMfA55VgRUGNwjfepq6suQMc2qAz1v077R5atTmdRXvo5z3Nbr9dmVt3elrqUXYnyRI9lWTrzZmb+npA8zysAoe8OUQDhOAAEMO12x5dCok+YGO6rbYsyjzKokzNBR1+Qp/dT0KDEAcZ+vz/5no264HKqbVFfcYcn1XW0CUq0M/qi7Jv6Rl3R3yPw1fkgcqzLM9dUkbo+1+nt1D10QKnH6nyHYxFE6mQW/T36i4L0qXIKSPR/9PdT8bbuuPYxpmCd0mMqHy4JIxNG1smrDiPLh10KM7NKuxJGJoz8WIw8da72KWHk58fIF30PlxoepU4J+rmOtXrsOj1PjV9/ix2PqRgaoPTaCEpaZzTy2N/ouKf6FJ3rFFhE
Y4gyiEAb7183PR3/n7p3URS+qLUucGZZZp1Ox8yqufrqWLFeZK1MmU7fRzkRKLRuDbqx/8o8KQACKuTOAxK6SJnvyE3voTamMtXFsqeCVN3v2u6o37rjWmfUaV0bsyyrXT+BbKLOYzBT+6zbYSjaWbT/2Hb0GFNJo+9Gu499rNuNUG2rrmhfTvlKXYA8FXfqmDltr9rTqT5pe7Rv2p86f9b66uSm8SnGtThI07Y/9pnvyhQ/BnSpfLgkjKyXidb/R4+RnGvVOJcwMmGk1vccjPQ2BRdMGHmUQ5THx2LkqThn9oJruD4UCGMjomHqlKyew/+6IK4CoA1xyr4OwLTdj/0WnSTeXxVad5+6Ptcdr7v/KcDUa1W5BP/IdkbHUCPW4+goz3Pfqef8/Nyur69rQa3X63ngWq1Wld/rgiC6UceKjIrZcVGrOpiZ/ejaaD9xZyJ1HMBC66hjjXRgw0Mh56rMVBd1wVWDZzwv2qfaUtQt99Vtf6MOqSPuiBQDdgxSdcE1BjT1gTqwiz7BeXGRcYwBKke1S87RhcrRPlU2db/V+Zzadjz/Q9/rZBb1rf81kMdBgraHPsQUhrqYRp8/1I4Yl2JRu43xTeup01fduak8vSSMTBj5ZIyUdTYJIxNGvgRGakkY+YfDyBd54FLHpLF1ASsGXbMf57XT0br3GMT6Yp11xlYXVKNR6++x3aecUa/RPkcl1QHOqXrqFBs/xyDC/Tqdju+aw2/KwEXjrJOd3m+z2Xi6AaABs0XqQ2T4uF5zifU33YIXwNJgzvRslGcMKpwLG6cLgzWdYrvd+hvdYe3UUTQNLQKm6pHPKtv4vc7BdVGr7jAVByLx4YHfdH2Atg0dRKCLM4fRTuqCfGyrsmR1dnlqUKL915xtvU5lTx8jM6Q2hlx0MFL30lBKBD8F9VPsu7atrr8awOOATtcoRNDU79wj+kOUnQJCXTzVcup+dXGK/ut1dTp9DGQ+xNyl8nhJGJkwUtv4KEZ6XKnOuCSMTBj5sRjpssura6ASRh7LS2Fk3diE8skPXDEIo/wYxPmMUZxi67g+vtwtKrJOOfE+MYByTd20Yd158ZpTCqkDOT1WF2iiPOoMjXbFp3/9zG/L5dKyLHOHi307taON9kX1ws5LZXlkjzRwFkVhRVGYWTXViBdeaps1tcDs+KZ11TFAyFayql9kT4oD7xNBRiwGZutbficw7ff7ysJcgESDAX3P87zy3o4YEFRv8fOp3wiGdXJRG4m60fZpO9EJYFhnI3qvyHbFQQRtUNaJe+luPtHe0eA/0oQAACAASURBVAG7r8X0Cr139K8oqwgQ2CM2puCv+tJ+xT7Guk+t84qgF48zENFztD9aZwSluKbjlL60nacGkSpTBZDYBq5jcPYUplDbWGf7dd9TeVpJGJkw8lkY6btmZJWZm4SRCSM/FiON6+QZIWHk58HIx8qLruGqazDCq2ucCkR/U8HVBb+oWI5rPfFN91oiGxLvGdkU/c9n+kodui1sFLoac/ytDsRwJJVRXRvUMHe7nbNWdWCY53nlfSF1hqJ96vf7tl6vnQGL8iaIw+RR4rbHnU7Hdrudp1TQ58iIKstVlqXXTTBBp5TBYODvFOHhDRDS93/sdjtbr9fORsZ3i9BfHbToAJ7jEZijHdEHfov56xqg+T0CkTJadUFP26GgX2evcfClJQY76jA7vu0+9iHajMoJuWCLGjT13Bj8VPYxOHJ/zon57U8p8Vz1tRjQY50RIGJ7uJb66mKftuEpA1baRb0aU+qK6lF1GHWuA8HYN21njA3RPulnKh9fEkYmjPwQRsqjScLIhJEvipFlWMSVMPJ43kthZJ09UT75gSsaSZ2C6pg6ijpOnJLTTsU69bM6mBpp3K1Fgy+Fe6oiIsDUgY46UxSwGltk/OJ56oDqqBFo+a5MmgIUQQ6WK74xvQ7IYlsUyJAV9bbbbQeWVqtlw+HQdzQaDAa2WCysKArb7XbW7/dttVrZZrOxTqfj12mb1MCz7PBiRrNjkN1sNp4O0Wg0HDDYuhYQ0YW+KlP9HVAgcOqAQ5mZGCCj7akeY8BUXW82G/9dA2U8ho3UgUe0eXRPH7bbbe1ufcq86EBD7ardbjuj2Wq1almxOtuI/gkYab8i46cyi+CrMtN6NIhFVlD7pTpStvYU4Kje4gA26jh+xy/UX7XOeF4MxBEste3aB87V+KH6OcXAxbYQC+JAVPt9CijUl+rsMJXnlYSRCSM/BiOj7hNGJoz8WIw0kVXCyM+LkY+VF5vhUuHFxhJcNW81CloHTFqHGqSyAHotRYUXn2SjoUZWINYRnTv2R+8RDVzbX8c6aF+j48JA1U09x37VgRwOrkyQtq9OJtqHeJ3e69WrVzafz83ssCCYoN3pdGw4HNr9/b3NZjMrisK3ns2yzLrdrs3nc9vv987AFUVheZ5br9ezVqtlvV7P1uu1zWazylqv4XDoQEJaBEChoDAcDq3X61m/33fGkKDGNvQ4KYCr98FpsdMYOOvSQHSApIP9GExU1/xpAFbmkjpjnrgCnv7XFzDGQK3BIfqM6lbboX4RfZI+85/24tcx0OkAqI7xVBvT64ui8N+LoqjITQNdZOejPvT4qbjxnAeKOn+NJcaJukEh/YixC+aUc+vAQIsypiobjbXR1yOrq21Qm1OWOsoxlY8rCSMTRj4HI80Og8KEkQkjKQkj/9fGyBd54NKnUApGqU+hkZWOwZBjUdCqhGgYUTiUU/dSgUZj1nr0eDxWd09VTgzwauTadpgn3kCOQaBQ3a0nBjeVDbLWN5m32+1KUKC/sQ5yuWHeaENRFNbtdm0ymTjrdnZ2Zo1Gw9brtS0WC2u1WjaZTKzb7dpsNrNut2tmh1x53umxXC6t0+lUGEUAYDAY2HA4rNhMv9/3lIbRaFQJWOSgNxoNz0MfjUY2Go2ckaLvb968sV/+8pd2c3Nj//RP/1RhhzQQoQ/uo7pSHWrbI0jU2YCZOVipDutYKu4XmbM4aMEeFAT1GPeONhPvmWXHBd70GQBVljemdKicon9qXTCmCkjRP+hTzBnnemV6WUugwU0/q13XxRbVSQTDCIBaYlzTgB11rT6vwVwLdemCbWVYY2xT2aNPBSjaFO/NZ7Ur5BkHHFpiXdEPHgPaVB4vCSMTRj4dIx9ilGU2Go0SRiaM/GSMVM9PGPmHw8gXe+DSTtZ1Ts9VRik2Ujsag7KCTAQcDLgu+GKIUVkUzq1z5DohxoCsCjoV7NXItP+9Xs8DtLIg0fm1XmVZVKaq/F6vZ4vFonIdsuB8AKvT6ThTwq5K3W7Xut2uDYdDB4Tdbuegs1gsrNPp2GKxsKurK8uyzNMTCHKz2cyyLLPFYmHn5+fWbrft7u7Oer2e7fd7u7y89Bz4fr/vQaPb7Vqv1zOzQ9DgRYwwd4PBwM7OzhykcMavv/7afv3rX9tf/MVf2OXlpf3ud7+zf/iHf6joTx03sol6jsq9TqcRMLAdzeXH7gjSMWUFm4CFiQ6sTGJsewz61If9xAcStRPO0YBOW5X1jMErBtZ4z1h0kIOcI5DqOgL
6S8qFDiA5X9NEVB86KKDeU0y06kf7FeNQ1HVcqK5sl7aD+2gd2vd4rtqZtiGCnYKMglpsvwIUrGC0Ydhrzo+ghSzRodpOKs8vCSMTRj4ZI++nh7bkWcLIhJEvgpEqz4SRnxcjH3voepEHLhVSnCrWhkbDUwHGzur58Tx1Co6rk0QBK+NQ93SqDEI0hseAMrYpOpQey7LMgyELaAnO1MO56uhaV1276s6DyTo/P7eyLG2xWDizp6BqZr7gttPpWL/ftzzP7e7uzvr9vr8QkXsAOO122waDgQdy8scViNQG9vu9p1dwv1arZYvFwkajkc1mMzs7O7PBYODpEvP53EGEAsBlWWar1cr2+7397Gc/s7/8y7+0X//61/b69WubzWb2z//8z/a3f/u39u2339p+v3eGUvsOcBN0sizz9A6+x7zgaCcK4KQExDURylorQxsdH/3j/Pqb2l9c+Kmgwvl1v6udaOoC2+qy+FQXU2uAisCjOfIq07rBYWQtdYAV7RGfMKumgtAfZFXnn9HfonzUf2Lb6nxNi14XU0kUVFRm+l/7zLE4eIj9Upavrh3RNmiHMqlxMKly1DgTBwkUZSLr5JLK00rCyISRT8XI9WZri4d+JIxMGPkSGJkHzONzwsiXx8jHcPJFdylUFqIuQNORmKcen6L1XGXhuIcqgd+iUPisCuQ8FFXHZESDiMapQUmdRhkCzmfaX5/6dZtY2kCO9/39vX9nOjvKF5mhZPK61ZE0z7Xf75vZIYVBp/wjgwpbNp1ObTgcWrfbtc1m44t6lR29vr62drvteiZlwuwASqvVytbrtfX7fbu7u3O2r91uV9IhyGvvdruW54eccdhBjrXb7crv3W7XfvGLX9hf/dVf2a9+9St78+aNFUVh//Iv/2J///d/b//+7//uAY/3oygrpHZBnfwWg7/mVWuwiOCAPmJAjQ8BahsE7xhQIoOsQZw6uRcLj7FbAgl/GrC4Z/RHbYOyNWr38b/aftz1SwO+Au6pABuZR9qgwInPKFBqP2IaiRZ0qwFU5ax+G31Y45deH2OH6isOSPU3jUPIOTLFWk7FOGKmzkrE+9XpTAcRkTXWdun9Y1x+DExS+XBJGJkw8kMYOej37doOKYX9fj9hZMLIyvGPwUjFiISRx/I5MPKx8iIPXOpU2pE64RCAzI5TdirQSuPCdp5m1W04IyjofWKwr1Mk37XNtAMmJ7ILajCnlEgQhM3a7XYemJkK1h2HyOUeDAbOsrHjEUWDFNdmWebnERAwhsFg4CkYsIaLxcID63a7teVyaWZmFxcXtl6vbT6f2/n5ue12O5vP5/bll18600gfR6ORb4e7Wq2s2+36FDsAY2a+C1Ov13O5ttttG41GNp/PrdFo2MXFheeg6zT4cDi0drvt/Wm329ZoNGy5XNpf//Vf29/8zd/Y3d2d/cd//If93d/9nf3mN7+x1WrlC4tJ21Cdax9g7PjPnw7gzY6gHJli6onHYcPQM/eMqQuRyYoBLQ5oOEZKgbIzBDnkhy/qgKfOZ/QBRYEk+oQe3+12lUGFBrs4wFIQjz6i7Vfw5joFmDgI4xyVF+dG0I7AFa/nT/WoQf4UgJpVY96pOKN9PRWw9R5RRqqPGCPrUkbqgLDue52s9J46YKm7NpXnl4SRCSOfipHDs6797qHtCSMTRr4ERio5orE/YeTLY+Rj5cVSCinKMkenMKsKTdkEFZjZ8clVmRKtQ5++Y/0Ytk4tR0DgCRjB6T1VcJyjQq5ThBpcWR62bCVoUD+BCQfj2Hw+93uQ6oDTt1ot34WG882OC3nZfnaz2fi11MOiYK4bjUbO4PH0D/tycXHheerD4dD2+31l1yFAq9k8vK1+vV7bcDisBCbaCdumej0/P7d+v29FUdhkMnFQjMzF+fm56wJA0J2U/vEf/9H+7d/+zd6/f2/r9doBpN/vu6y1zshOKJCoHOgHxzVwqgz5HtMQGBihE71Ot5UlcNPHGJB0AGFWZW7U1vU4Oz4iK/UHBQMN4lE2yCQCH3LhHAUsDWzq18oqKfBE345+CwhSv26TrP3W/tcNWlXfnKf31LikA0/awLEIsMpmaZuV/Y9t4XsEhtiWyHAq0Or96oK69kMHv7EewDbODMSBNedFm41xOJWnl4SRCSOfjJGbBwzILGFkwsiK/3wsRnodVn0wShj58hj5WHnRB64o7NhY7Tif46BJv+sONjqVq05Rdw+cPualR4OkRBBRhauhxPNRkA5GYnBQY4U14zxl3ii73WEBbL/f9/YQ5HkxoQIBwXG3OyzmvL+/t7Ozsx/lZJMOYGb+gsXhcGi73c5ub2+draON3W7XWbPVauXXw0SSjoC8V6uVFUVhvV7PVquVbbdbm0wmdnV1ZUVRWKfTcT0Oh0ObTqcOPvSH1A89hr4IUNfX13Zzc2O9Xs/bF4MguogDAAUOZe0AVAV5rlXGuc5+Tg2EYtCK4EO7tF61PQ3WEWA0QOt5apf8KTMJMGgg1Tp0gEWbou3redTfarU8QOm9CFw6aNP6dGCmLD7y0f5Hf9fgq8dUHmoL2ufIZkag4d4sblc5aLsioJ0CEj0/6kltJaZ1nKpX69d4SL06mFa9aL9PDc4pMYbVgXcqTy8JIxNGPgcjD0qwhJEJI18EI91v8/pZ64SRL4eRj5UXSymMiq27eQzqdYMmFSABTM9B6ByPT6FqDNq+unbU1a2BQR1RX1QXFaJMUJ0ctN8xvUNBCPZNGT5SKxqNhg0GA39BIiDEAl0CVq/XcyZvMBjY7e2tg9N6vXZmT8FhNBrZdru1xWJhg8HA70NbSV2AySPvG3Ajvx4GDxlut1s7OzurrNf69ttv7d27dzYYDJzlUmAEzEih6HQ6/rsuWkUv6EblCrggewUOrkfuyDYO1DlWluWPXkZpVg0uce2QBkRsFJnE9AG1WW2DHov3pB70oYMh+otu1VY1aPAbIKx2Dcip/0SA4Xw9D71oUVDRdmjghqXV/iGDGMTjQE91zP3U5+r8NQ42drud254OQuP9VE7IIvp83UCjDkyUGY2/x77rNbHoIEHBSe1dz9X/pwZHek9+1zpTeX5JGJkw8skY+fbtQ8ctYaQljHwpjDyYVJYw8jNj5GPlxVMKtVF1wuX8qCQVany61Xrq6mRqW3/DSKmDe8Xro8C0LRgaCtO6tC/6pF33pBsZPq6Jgxh97wPv8KBfq9Wq0l5daEyAv7+/dzDRwA8Y0KZGo1GZ0p9MJrbf7221WtlqtfI0CICs3W5bURS+wDnLMme8SMngN+pkQTEB+ebmxn7+85/bdru1u7s7azQO2/yen597oFEHo6/b7daZOpWt2gjAhjwjQCkA8Z/fsAkNhOqYtA37gCUliEbGiWuRvwbgyDCiizjgUP/IsqyyiNzsx+/+iEDIudofPa72xyJsBRL1FexS70UbGDBowNQArECnYB8HXJvNpjKFrwOXKLMoozgQiHWrXEj/YX2H/o5/qI2dAgcNrPQ1Low+BRD6exzssDYl1qHX8llloNcoS49fYsdRvnWyijEae8Y/6mJ9Kh8uCSMTRj4ZI3
cPOi8tYaQljDT7dIz0vmc/xsaEkS+LkY+VF9ulUDtOo3DsaJzKvCh7oB3gPK1THSjWx3cdgNUF9WgMWpfWQ4lK04ClDEA8JwY9s2p+c7fbtWazaavVqtLudrttu111209ABZah2+06e0N/2J0IAyJnnSDP1qbtdtud1+y4gw71wuzRT34HKFi8bGaVzxRSO5rNpi8Khl3bbDZ2dnbmrF+WZZ6jrjZCPnfUgzJQ1ElbNY+ZPnC+pkdEvaJL2qH3igMMZZewW47H4K6faS/61OCPvqhH+xEDONfpIIJ7aSDgevVDDUBlWVZSHGjHKXYoBka9pzKFMd1E2ST1SfVv5KjX0i8GBHzmfjqw00GSykh/x1a4p96LPqrM6/yWunUwqQFcdUGdMVhHYKkDCLWNOnulPWoLEdzMrLKhAcexsyhzHbSjL2X3VVdx0JLK00vCyISRZk/DyIOQzTNBEkYmjPwUjHT8y6rrBhNGvjxGPlZe9MXH+rSuzlIXuCk61Xfq6VGFXje4imxfPM/sCCSxbVqPKpuiThkdNDIvyhLqYBD2g9+URTgFfBjMdru11WpV2fo13os2NRqH3ZbYaakoCn/fR3RQwEVZLZUnKRcYagQYDSQwOKROwNo1GsdtZ3u9nt3d3dlwOHS2cL/f+1a4yIIFx8p+RUYUh6AN0WGQh+pX7SIGXpVlBA/awGf+lP1QpirqRlNpCN4VxqnGzkgVMTPXE/fhPGWL1I/qAqPattpMZPiQs/YpDvAo6q/q308JwnGQybkMInQNB/YV260lBmvsOuqzjhk7FWfioFXlqPFGz4l2qm2ra6fKiD5rbIr9iSksWmd88IwDoSh31bsOJNQOtH9xwJTK80rCyISRT8XIwWh0aLNl3taEkQkjPwUjS/txWnLCyM+DkY+VF30PVwQDPc5/GqmGS4lsnSpZz4lCQhjRUKIRRGHXKULvqQYY/9QIYZFUKZQIICgVkNA6ooPqvTGEaMywbto3ArqmAZHfbXZ8GzgAENktNTC9P4uMFUgAGoBMdbzb7ZwRzPPclsult58F3vriRuyCxcPr9bqyEFztivZp0AaINFVCUyZUrpHNU/tRAIn/deAJoCjTSD0qO154qbtkqW1Qh9om/1VeAA39VYYsglO0cwUd9SWO6YLlaOcRmJC/+pgyWGq72Kj2R+/NoEoHLKQNYEONRsMXsWuQVSZO05qU1eb+0W649jF2SgGiDlSjLLRf2l+NF1pHlEcM+gpe9E2vqdOH9lHbo3XFGY7YR42peq94n1SeXxJGJoz8MEZW9Z0wMmHkp2Jks/Hw8JlXZ0cTRr48Rj5WPvmBq+4mMaA+1hgVoH6vq+cUWMTPdUqkvsjinKo7sol155tVgQglxfxhNSA9Nyo6Ojy/a9AltzYCMu0lHxUnjKwS94tsT57nDnhlWTqDRhqBglKdkQNQ1Mk0936/d7Do9XqW57m/+0RZrt3u+K6VqMuY269MDm0CFEkP4U/bpn/UjewiIOBIuvia82KKj9aP3tXW0I0yNZEZi2DFvSjUEe0l3iPamgKSyo7+a8BQxrRualzlrjaq9hDTXPR3vRd9VfuhTo4VReFpNvqb1ht9UnWrbCH3RG5q/zogif4U5afnqx5iMK7Th8okDpK1Dj1HQToCkYJLHGCrDKKvq5wU/HQAQIkzFKk8vySMTBj5HIzMGPBnxzRFs4SRCSM/HiNdLpYw8g+JkZ/8wKWdrzNAZZ0oKuS6/+qIsRMqEDXUGPj1nrQNVkEZhnhcjU3vF8FBDTL2TduurJ4GYj7jMFGJ3EMBgzx1WC1SEXR6OTpQXfsJvtGZNWh3Oh1brVbebpw6OpKCVgz6qhtSKpAZ2+Xy0kiYtwj21M29CHjKwClwRHBTp4nMCiWCvy5QLoqiNhc9y47Mp96T34ui8POpgzbAMKEvdpzSYE8AA8jovwYYBXS1m1M+o3YZg1YcnGhwqwuYWh82oDrjeAzIyFUHTrCb3Bdbo8/YYEwXiPYewTIOIiP4K3hr7KB+jVNKDiD/CEjR5zSmaHu5v54f64uBW+WocUdlim41dik4qq3ENsZ76Dl1Mkjl6SVhZMLI52AksxH0J2FkwshPxcg9DxuWMPJzY+Rj5UXXcNEZ/uvNVTHxWJZlbjQ4pSpAp1spavgoQAWD46pycGRVPOfp9apoVQJ10ybeeo/DE9h0AWKWHff6px1qVCzubTQOu/YxVazBT4MGzA/5yWog6vwApS7YVTmo4apcKNyHdrKYkAXKZuZtpa/qEDG/nYClLA3nahvVmHFE/lTO1BPBBHlENlFtVIMfdsfxyGDpblYU5AHbCDjrYGG5XFpRFLZYLGy9Xju4ANR5nvvOVtpHBQ1NYdDAzX8FhDiAqvNNztXgUTeAUTnFc9SmVWY6kFBgj6ClzCVAwvmnWDGu13toneq7Kqs4uNJ3FmlQxu50IKSDSo1Nmq5CG8zM/YO6+K6DXtWD+kUcOKtPsT4hxk6Nf6o7PRb7pIBTNwCJthQH7nosleeVhJEJI1Uuj2Fk3niIg7KFd8LIhJGfgpFGn8tq3EkY+fIY+Vh5sQcuvVF8yqsTxCnB8p3fMUquo5MqPFWY2Y9zwakHIyZ/W+tXwel1OA7BHqM2M0+N03dl0AdybvmOcdA++txut322h75FoND3jOz3exsMBt5vXRiq/dC2qPzUsON0qxodjqeG3mgc3/9R54gK+vEY/VZ5wmiRkqHOTq77brerpCwo80H9Coy6y4zaTt1gpy4VAuYOhlQX5CrIozf+KyByTVEUdn9/b0VRWFEUDkxmZpPJxPI8t7dv31qn0/F3sRRFYXmeO6u7XC4dmOiHDpjqmHEt6heqX3SMzelxM6uwhvym9WCvHNeBltqLDgr1XSw6SNOgqrao9wKkuC4OCLWeeB33ZNcpbAa/QvcKgOo7cRCmfWRgp3LnGGkeer4yxaq/CCgMQKkLMNQ+xTZq0fjIPfEnPV/1qu2I8UTjbCrPLwkjE0Y+ByPNjrMRCSMTRn4qRmbi43XXJYx8OYyM99HyYptmqJPWgYmeF8FAp5K1EKDV8OqKCkZnT5R9wpDqghxt1vurwRIY+MyiTlgL6ucaAku/37csy+z+/t72+71vhR4DQrfb9TfNKwtAm2ADMHZd8Esb9cWGyiIpI0K7kAH16HQ9sqON9AkGI8uOaQJch6OrI5ZlaavVygaDQeXJH6Aoy+rLEumn5sTv98dUAx1gqO1E9ikyE1pvnYPRN44BItvt1gGAvjUaxx2lAEHef0JBTpvNxubzucsHWa1WK+v1erZYLOzt27fWbDat1+tVrtnv99btdm0ymdh6vbbxeGzb7daurq4qgBrTBxTE9bjKTfVMcEImfMeudWARBxcKvBqQlNWiLchWfTUOAh9jo+L/siwdBNS+ARPemaIDDlKKFHB0UKf3AAQjaMcBivYHX9G4o36HrKiP39THTwEZJfovnxngUl8EuwjscXCg/Yz31u/pYevTSsLIhJFPwci9HWNFfEhPGJkwUnX5HIw0M2s0D/pJGPmHwcgX2zRDAyAOpI3Rc/V8Da50mo6q0
DXQ6RMwymLqnrr1M/VFRksVXyc0ZeT0dx4aYFhYrEvQ56k7vh9EUxIajeNCRwI60+44H3LkfST0VaexeaeVypTAoEaEk1N/NBTkoKwI7ad+8qqpH7nptTq9iwxJCeA7W/jifLwwUq+DOYuBIbJE9EOBmrpoD/dRW9V6YNW4X1EUtt1uK+9iAThgcQHV6LjUs1qtvC5AifrH47HXfXZ2ZmVZ2vX1dSXQLxYL++GHH6zVatlwOLTdbmfn5+e2Xq9tPp//yLEVxNXf0FEEetU16w6wK80NBzQ0aCvYwKip/FUv2ibAWtfzMUgidkRWMfYxstXEGxguzqEuZEBdGvC5lmsUnCNTqDKM9anMsRUFjxjU9bj6lAIs10S/iDGUvwhisU0K5jrorqsv9kt9V2WUytNKwsiEkc/ByHJfnelMGJkw8lMxsgwP3QkjPx9Gxnq1vMimGQi5rgH8ro1WBalToMB4fpZlHoz1CVzf0g6bYnbMrY5PuziIpkjQZjVo2sGOR8pkKLPU7Xa9TWqUGBNt1QWgOkCEtYMVoux2OwcrfcKPLAMGouANixef5DEg+q8OolO6ej3nKiupqYBqvJqmAJtFINLUB10EnGWHl1vynW1NqSPaEv1AfmbV91hoX7Rv/M4xBT/YwaIobLVaORCsVqsKQwljq87LNdhNWR7y0mezmU2nU69nNptVZPLDDz9Ys9m0s7Mzr4dATv91UHNzc+O67vV6NplM7P37935fDdzqT6p7ftOUFGRG3disgghBkfO328NWzdg5dqUDCw182rbo23zmuw584hS/ppJwLx2k6cMWdepAUEEPe40DvAjQgJ3qnfNUluo/6jP6m+arUxRANOVH45ACMXV+SN/aHo0Bek48HgfM6lN6bZrlen5JGJkwEt09BSObMhtUlmXCyISRn4yRTTbcsCxhpH1ejHysvEhKoXZSWTMapn8Yq1n1bdA63a5CpC6cVXNcqT8GPH0S16d1vReC0cFaBLyyLJ2NghHQc3kXRmQGolIAF9ITYOTMjlulw+I1m00PZBg+dUew1qAwHA4tz3O7vr42s8N7RgAwgrY+scfgity73a73mfeD8LsGljw/5GivVqsK69RoHBfqEpBgrVhAjeECzARQ/mtAU90TDGBLK/nuD+1TxlcBWJkZBhvKJi2XS1+QrYOHLMscEMgRR1/L5bIy0KceZf8Wi0VFn/1+39m/Xq/n0/sRMOm3+sput7PFYmG73c4uLy+tKAq7urqy6XTq98RWdQtg6kH/yrzyXYGTvscBIjInqOvghHe70FbtSwzGAIaeRztUd9hnHFgQyKmDNsf3i9Bv/FTBCX/M89zf9aPxJg42eUcMcijLA4MfU0aIQ8SBuL6BuokJDFQYdGHDdUClMSXGKuQOa89xHSBEecd6+K6gr/5Vd34qTysJIxNGPhUjC2f+j+utEkYmjKQdqrunYmT+YAPNVrPyAJow8veLkS+SUqjMp3acYwyCNJhpoxRUNHAom0ddKEtBhQCEcWu9WZZ5kKZobnfcKQjl81mfkjHe5XLpT/2wbDABGuRxbNqtv8HmBwasyQAAIABJREFU5PnhvVTr9dodFSBQppHvGsg6nY71+327u7uzxWLhTo1h6n1hqABbDE+NGD3xH70WReFgpEEIWTNVruBUFIUHNILner2unGNmtlwuKwNndKq7USmzSioi7dBBA7pG95o/r4wEx3e7nYMIC2811YXgic0CPgxASNMheAAeMHir1cq63a51Oh1rtVrW7XZtsVjYYDDwdnMtwUqBW4OSWRXE7+/v7fz83H71q1/Zv/7rv9r79+8dXFerlU2nU5eF6gZ5Ktir/DSQRhaKulqtlgdodK0PPLqrUgyGajsKNBrky7J05hofVbvFLyMbiF9rHFBbQR7qX5QY3DVo01bdeUxtOOpJfVZ9jQEMII+9qt8hI+KfxhCNd3EwqLrTmQIdkMTBI21XsKP9+J6CcnrY+riSMDJh5HMwEutA+wkjE0Z+KkZqnbQ1YeTvHyNfZIZLDTDLju+80CdmDEufirUD8QlXpw8xUoo+Javh81mDP4pWBgAhI0xVHO3vdDqudD1Hc+G5J8bHPbIsq+yqhPKQQcwdV2UpKGLAODJ16xN+s9m0fr/vzsH5WXZg36gH+SkDR5uVJcQIqYMgsVqtfIGzmXkAYYGsMj7UQ/vqBhNmVsmthxnTz5FRjfaDfcECapCEYYrMLABEWoTaEGAOy/bVV1/ZbDaz0WjkC4WRIfej77CApFCQAlKWpQ0GA+97URQ2Go1st9vZbDarAKUCJ/JTW6ad6Prt27fW7/ftl7/8pduK2gg58ciaAVi327XpdOrrD9DlaDSy7XZri8WiYkeAnQKP2jl6IDAqq4dN0B/N+UeW7GSl9qN+0u/3HUCwH7UnjTP6hz9wb/qDPFlsrfaEjyPD2PfIMMK+6iCF2EGMMjPrdrsOvJxLDEKmGthhXbEFZKIgovWrXNSe4kMp5yk7S1EGmdik91PfTeV5JWFkwsgnY+SDG5ZWJoxMGPkiGOljL0sY+bkx8rHyYrsUUmLn+E+jMTycUQusBU//BBV9us3zYx66PlFyX4wMISCwyqLUsrpzD3USGHBWNVpYEwIghgnrosFK2TnaBEMGoGgaBkGKYziBMgy0AxlxbWRbVJZmVgnoMDEALOfBrnEN8o16m8/n1u12PWArM4PhKlsJS0KfzMzZN/ShjBMBQ0GAgNjv971N1KdOgqOqDumvDjb0PAAfmXa7XRuPxzYej22xWFiWZR740d9wOLTFYlGxI+RQFIWt12t79eqV9Xo9++abb9yuaDvpKHxnRyaVt4IzrA/HsUGzAwv1n//5n/bLX/7SfvKTn9jbt2/9OAMovTdMNTIhP34+n7vfcRzAhaWkHuTOIEPZKbPqDkzKxEXWlGCtYIOt6e6VcWDFMWW0dLCELeBL9EuBmdQWBooaULEtQIU2qn8oq4nv4I8wjfiosp+qRx3A0l98iHPU97hGAVyvo90KCBGMdLACk89gV2VHH+vAJj1wfXpJGJkwku+1GNnpehtpW8LIhJGfgpFu9w/hO2Gk+XHkwfdPxcjHyout4dL/UfAc0wbB/PA0r4wAzo1SKARirscINRVCQSeyXjrAV0UiUBxNmTYMYbFYWFmWNh6PbTqdOlOCce73e893JXVOn7KVjcTZ1+u19Xo9Z340OPf7fRuNRv5d2blWq2Wz2eygQMmbx2F7vV4FJDTgaXBQPen6APLJ6Y+ZeeDFkaOTKbjAaKBD+k8wMTvuBEWAIEjSRlI5Go3jYmmcHNniPLQDfamtqD2p/VH4Tn41YIlt9no9m8/nHlzIU4eRw5Gpa7Va2VdffWVff/21fffdd/4+GH5nIKIpLfQbW0Ff9BVZA86bzcbZoGazab/97W/t8vLSgzspPMhM2V4GQDB1AEar1bLFYuGyRi/YEHLRaXTWJmhQVnviN7MjI6tpIMqOo0u1S2WClWGmXwpcyuRyjf6u9osvjUYjlz9AjV1pWowOSrFT2qhBnwGJDlyRHfbMMfwNP1Bd8x996yBRwVht
Gbkp+04cpN8UfZEo1yrgRAaVc+hjKs8vCSMTRj4XI7GXhJEJIz8VI50oKy1h5GfGyMceuh594Mqy7H8zs38uy/LmsfNwBDquCqBR2nAz82lMhBCfNAlY1EUdKtzIUGnAxPhQuE4/UqiTKX8Ur31pNBqeJsB9er2e7XY7Z5T0KZg2afBXNgn5wOKUZWnD4dCNFSDC+XACBUO+K4gSwJUJQm7KLuq5yFenvEkX6HQ6tlwu/RhtRmaRIVXwVPZJAw0sDDInrUFBnQCjDo48kaXam5lVnJmgjz7UCQBbdQi1mdFo5O1otY67KbbbbZtOpxU2ErCO6QKbzcbu7u7s4uLCQV5Zlmaz6QtQsWNsHv0iJ2VvdFCFLQJkm83Grq6ubDAYVPpNQFG5KTu+3x92EaM9g8HAzMzZNnShAQm7BuSVNcdmGcwoG6UsloKrDkIiANQxewAUflTHruGT6iPqPwTuzWbjawewkxholdkiZnE96Uir1aoSq5AL59BXGHntF4vbNT7yR50qGwZIyl4rCKFXfFTZQrMjcKoeVD6xKLBrnE7lUBJGJox8aYzc7auvXkgYmTDyUzGyjihLGPl5MPKx8ugDV1mW/8+jVz8UGkYnECr/eTJXpkOns5X144lWAwHnq/AADDU2zUFlarjT6XiARnkKWLSBa2A4yJmFLcmywzSymdlwOKwEaGUZYVkwVhS63++t1+u5MxNYMJCzszObzWa+IxPGpPViKCzQVTnTR/KN0Qvy5LvKHKNS48bwYIHoP/duNBrucOqgOg2PE9DvLMtssVh4kKG/i8XC5vN5ZSGsphLgZAoe6F2ZPGSquldw16Cudofs0RP95EEQ9mIwGPhCXphNAivXrNdrZ7P2+719//33vkNVHOxgp9iNpv2UZem52gpCZlZZLI4cleVDHzoVrlP3DJR0gKEMK+sQkAFtiaCA/xA0ucdgMLD5fO4+q4NKZZcAMw10BH1ld3UgyOd+v++61AGTp+M82KzeDxvAZ3SggC5YpI2tKUBxb37H9tCLsn7IjtiELnSNjcqQtrbbbR+cKJuLz0VWVwFWByN6rC74Kzun/dNr8Tkd2Gl8TuVYEkYmjHxpjDxumnGQccLIhJGfipF542GGvZEnjPwDYuSLreEiQMHSaJCmE6c6iNBgt/RJlmCl4IQTwW5jiHHaVp9qCaqcQ1F2StkUrs3zvLLgl/qHw6GtVit/8kfgAAAsAQphin02m7kMCPL8jnOqrHa7nTMAtDsypDApGCaANhqNbDab+TSu9hXWjvYiH6bDcYiiKHzBsQ5wdQpfdQWrCfunMxDqhBpAYYvMDo7Z7XZtNpu53jBsdNnr9fwa7st/dUIKTgxbqiwIQQMWBz0CRvw+HA4dFDUQmh3Yqvv7+8q7Sdh5CbDBB0h1wDewr/1+b+v12nUTwRnWDN/QAKPn9Xq9CnvHfXkXCqCrdWG/XENApj1cw3fsRgeJyvBpAGSARoDs9XqVtRfKKqEXvisocF+NKzrwVH+g//iVDiR18NNut20+n1tZHhZtMyCgXmSqg1UGp+v12hqNho3HY7u7u/PBIXYOKwc7jY9p/5SB5T4UjXXYIsdiepnGUHSqa3j0WvUJdKay06Ky5Ht64Pr4kjAyYeRTMLIhqWEJIxNGvgRGuj1Z1QYSRr48Rj5WXnTTDASJ8vW4OrzuaIIxIghViB7nWjq02+1cQfwRCBVgzI4bNfAH28N1OLOmNOjTq+adA3babnVm2sRv5KITQPr9vrM/Zla5L8ZK/jlOiyPSF3Jku92uvxRxMBh4CgT1wjQxDY68kaUuStZcaWQHA9rr9Wy1WnlwQKcEBtpIXerk6kxmh2BDXRi6pjdosFEHwFk2m41Np9MK4CnTlWWZgxEyQ8bqVDAugDigyba0Wi9T+tvt1nWHrWEXRVHYYrGo9Au9EzjK8rCb0Ha79fsrM8o13E+ZKLY3pm30SwG3KAqbTCZmZr5jFsER+8J3Go3qzkLcS4MHDDa7SuGfMQ1IwY7fVWZq49gIMlRZIQ/Ai7aaVXdN4/4cj2shOA4zRzuQ02Kx8FQHBnAEaK7VmIJN3d3dubx6vd6P9EYa1Waz8RSr9XrtslV71T5wL/oMk4dMkCvt53xNvaDdtEdjqN5DjytzT2yIJcZeBbxUnl8SRiaM/DBGPryM+mGXwoSRCSM/FSNbzcNnZo45njDy5THysYeuF3kPl+Zd0mCz43S92fEN3So4TTOgLv3M+Rq8AX0MUoGLNAWupz5lxlC8thWD4zetE4CIQmW6lXti7ICG2dG48zz3haEYHE/maizcQ41cA6tZ9ame72ogGD4AyXahKnOz45bspCzgfN1u15mH5XLpdaO/3W7nfYBh4lqCTVEUlel45IT8zY5gx2faRz0K/DgRZbPZOHsKo0S/aY+yXVwL88EUMMDcbret3+9bs9m05XLpqY/7/eFlmTrdzb2xnf1+7+8T2W63/pu2q9Pp+MJi9KYPB+qoyNDsyLAQJLBTArWmNcAWk7YQ0zBgPBl8aIAlqOmaBPUJBoAarOgD9RMgkS3nqR4mk4nleW6z2cztjJx+M6u0i/7xnYEMQKNMv7LgyFvlijw11UAZMey01Wr57lO8sFPXkcBg6gJ1lSE+zICQ9wbhKxr/sFEd9FIn8Y1UC2U10XF88MGHYBGVbdffFWwUGDTmxWuIUfqAkMrTS8LIhJHPwcgyYJ1ZwsiEkZ+Gke4bjdxne1WuCSN/Pxj5YjNc+gSpHUTxCA+h8PRPIYjHJ1TtIMrAkPRpXp9OFVBQNsGDtiiLoKyhBmumWmFYlE2hr/oUHNsbn3YxAAJCnucVwIGpwHjW67WnORDcABzuT9qAsp8qJ7MDk8MOQ8hNZW1mFdaR9vf7fZvP564r2sp9NNggz3b78CLExWLhU+AAEvceDocVecDicAxdq2OpjeGE5H8TsMwOaQ13d3cVMINBoc2AJbnO2Ol6vbb9fu8LnwmCWZbZeDy2b775xrrdboVhRoaLxcImk0llkSwARO4zbNhyuXSwp8Am0z4YM4IZQZ7petYxsDMYAVhTT/ABZYrUB7EBTV/o9/tuawykYCHRh9YDcOpgBSaROpVlZ8F6q9Wq7Ex0c3PjekKm2As7kdG/0Wjkete+YI/Yi7Kv2EKz2fRti7vdrg+YAL7oXwqsLJ7Gb/VdMrqoWNO3suy4UJogrzFAGT0dhCrTrCDOoAHAjucgC/2vtqDxSGcfNOYCHnqNxtdUPq4kjEwY+RSMzPKjj+nDX8LIhJEfi5HuD1lesauEkb9fjHyRBy46oUBCJ1EQ382OT9E4rwa4yEphACoIZWR4WiXgYlwYc7PZ9G1lOYf7xKdpNQYVcAQ6gmhkVriOQKaMCwyEgpfZkUXTttAvBUyCeWQvYR2U3QOEcNZer+e7TLnixeFIUaDt1IPTZFnm+dYs+AS0lIkkR562q23AupAmQHCEIdGApFPkjUbDJpOJTadTT1Ug0BGop9OplWVpt7e3P8ozx8YovV7PXr165e0gWPKOE/qgdtBut52V++KLL7xO5EVeM+8WWS6XDh6bzcbOzs5
sPp974EIP6JSAzz3RM4wvcl6tVjYcDq3VatlyubRms+myw5Zgl/A3wAd7ZK3DbDbzwRWDJA16sEf6/hlsDlslWCuTqAMrbJXPpIkgA9pSFIXNZjNnppSZL8vSt5HVtB78SO0LX6Tv2BNrMpQxZ1Cgvo4MFOhg0JWx1H7pMXRI0RkNHVwSFxjgMPjCZ3QQFXPRSZPqdDqVnbz0Phr4iREak4kVOgDWgSUDOv40NioYpfL0kjAyYeSTMVLew4UME0YmjPwUjNyF2Z6EkZ8PIx8rL7ppBg2nUzFYI8g8z63X61Wm8QEKdXwFh6hMfsfI9ToFIM7DqQhCmt/Kee12u5KeQJ6oAkdkBkmXoI88ZSu4qNLokyqW81TxOA6BRgENhoc24/BaJ2kHGCMBCflpbjLTv3xXAD07O6vk8+t/wAYZq1NpTq4yRuiBfHCuVfAyqy54xflvb28dfJj+ZwHuer225XJp0+nUBoOBdbtdu76+tv3+kDIyGAys3W7bZDLxdIiiKDzX3+yYc0zuOXKYzWb21Vdf2Zs3b2yz2di7d+887x4WaTgcOqDSZg1EvV7PmTbsFduBwWLKHZnA4iFv0gmU9QJwI/vKOdyv0+l4wKKdysrOZrNK0OQdKrBvkbHFLtkKeDwe2/v3792XlP1BttyPRcuwdeT9A2C8WLPdbttisfA1HQAgbBx9pV3q68rE4ZP4GrLX/jBAAayIDXEAir0om42+uUaDsv6m37U+7qvrUBQYIlunaTSco+1U0IwDcI232i6V1Sm2juOpfFxJGJkw8ikY2e4cY5BemzAyYeTHYmSrWX0JcsLIz4eRj5VPfuCikQhAAzg3V2ZIWWgcAcMgmCmDpwJFiNopNRazat6v2fFdH9wLB8GoED7GhzA1gOuUpgZTnYqlXgxN20d7MFxlHnq9njUaDVsul34+AQXGi8WYtF+ZtzzPnZnThcYEGK7TaxQMYGjIaScvd7fbVXKIR6NRZaEs8lQWj7YrSAJ0yJM0geFw6CyTgp8Cq5m5zZyfn9v19bUtl0tf9Jplmc1mMxsMBt6Xdrttt7e31u12bTKZ2H6/t9Fo5FPu2CGsLo6KrM3MQbwoCpvP53Z7e2v9ft+Wy6X1+/0KEGdZ5jpkCp3peO612Ww8mDJIgfXSIMRgxuz48kNdAzCbzWw+n9vr16/t7u7OmanBYOD58Z1Ox8F2u936FrYsTNf+sdibB2BYI2VvaBsskzJQAAb3++KLL5xR4r6NRsOm06n1+33r9/se7MvyMMsJqOT5IdV0Op3aZDKxdrtt5+fnDkDY8Xa79XfB6BvslcHWXHEd4AGg3BcdwFriN1mWOfOqi5bVlwB5ZeMYiGrsgoXjfhrc6Q+D6rIs3TaIo/ibDrjQI78Rn3RRPHER3+Se2uYYp5AzctRzI7Ck8vSSMDJh5LMwsnUY0GaWeVpcwsiEkZ+KkWZmjebxQTJh5O8fI190DZcqj4CsncMoeeongGiAoi6CCkEII2B6m0JHKQRcnnYRqhqYWXWXFb3v3d2d5y6jHH5TFk4Xs+p3nU6lb8o+KsvIdQpG+mSP4dAWNSRNs9Cnct1mVY0SGcBgwnDqeWowMEUwY69evfLAwBvcleWjP8iKhba0D3aJevmdNnCOTqGX5SFHnnSPr7/+2nc5ItASpEejkX377bd+72azaT/96U99IIHMdAq73+97PWzrSz5+p9OxTqdj7969s263a/f3986+6QAAffHwyM5ZBHjs3+zAYGXZgfWFFSUQEdwbjYYtFgu/DgaPYLxer+3s7Myur689vaPRaPguW8hLWWEdEJHTT447AY33qGDH+Fr0XV2sO5vNbDgc2tnZmWVZZufn5/btt99aq9WyL7/80mazmRVFYa9evbI8z309QFked/8i2F5cXDhTuNlsbDabeToMgfjVq1fOcpK3T9GUVR3EmFlFRjCYeZ77Am0AXAsxhKCPnlSOGmNoV2Tq8T0N5HoPYiSDONYHMDiD3dN6GQw3m023MYAmDo6JyXzX2BNjpz4UUDQuKDOZyvNLwsiEkU/CSAa3mSWMTBj5IhiJTzfyho1Go4SRnxEjHysv8sClN+eGOtWogkA4sB84kE75mx3ZCwIuggJcNHczdhphau4v1+EUCiooJz4NYyRxoEFQItiuVqtKUMBwqIOigEJ+qdavhqksJ/1Fnhi5tovfCAD0RQFVHQf2AVYPkGKb3bIs3WEB//Pzc5tOp84UUC+fkS1yxeC1XrUTBUCVy3q9doas2WzaaDQyM7Of//znVhSFfffdd3Z2duZ963Q6zvhkWWaXl5dWlsfUMuwDGSjrSmDo9/t2dXVl33zzjXU6HRsOh/bnf/7n1mw27erqysF1t9vZeDy25XJZm+4Da4Zdj0YjT+XA1gBN6ms0Gg6i6IJBF+woW96yQxDpH3me2/X1tW/Xi/4BTeSK7Nkaud/v28XFhb19+9ZznpfLpdsH7Bu6Jqiu12ubzWZ2dnbmbeGdMpvNxgaDgf3ud7+z8/NzazabNp1O7eLiwnVAv5QtZ6E0dcGmkU6hjCZpHMiQOKOpSTrQwZZ1gEg+OH3UXdPMzNumrDssogZpZa65DhujnxofOa5+Tb/4XePTfr+vvBSSe8bCuRrsub8OEBXgsFn6yTnKSlM3dvoh9i6V0yVhZMLIp2NklVVPGJkw8lMxUmVHfxNG/v4x8kW2ha9rZGTiCJDK1PFkzvsgGDSZHXdVQniwMPokSTDXYGx2ZNR0UK9CI+grQKEQnsrn87mzDZqfDbvAAlGAJDIAtA1gMjsyUvyGbOgjv3NP+kM+L8ZFoMS4YQeQM3KnHciPaXP6oQAU2UXdPrbX69nV1ZU7OTsIoXcFVmUXabO2lynlqAPWc8EmwhDCUAHQ7Xbbg8h4PLbdbmeTycRTIshXVyYD/YzHY2s2m3Z9fV1JjyDokOIwGo38t/F4bGVZ+pQ/djgcDm2/Py6wxQ7IbwY8VquVjcdju76+9kCCnna7nQcL0lGQrcqLY+12287OzqzRaNhwOLTJZOILabH5LMv83jDWBFDaRi79aDSyq6sr2+12njIDYKBb6lHWE712Oh27v7+3brdr/X7f3r5964GXvPXz83O3OXQD6LOombQEdKKDDOya9QZFUdj5+bm3EZuDyeYlptQDsAO2q9WqsrsVg0f6qSkN3FdjCqkSCga6IDvPc5tMJpUHQHxdGXMAlRgXAZEdr7DPLKu+eFMHCNSPr2nfaaOCCn02qzLuGs8pCjh8TuV5JWFkwsjnYmS0nYSRCSM/BSOxqUbzuOYsYeTnwcjHyovMcNEZNTgaxneEq0+gvEAPxSjYoGCMT5+aIyOnBqhMlzJfBEIN5vqdJ3EErNufcl673fYdWprNpucIDwYD76tZNSVCmR1VGEY7GAwqObg4qhqE5vVrXTGPVZkEjB8QQW6R1aCNgJ4GE4yRQMY7EwASZVcIGKQLKOuKXSA3ghrBl/sTBMk3z7JDjjjOSxCmncPh0K6vrx2M9vu9b6mqQVmBlIDSbB7eJzIajez8/Nzu7+/tiy++sMvLS7dNrp9MJs4aTadTWy
6XdnFx4SkV2Ba2MRgMbLM5vshxu916XjupENgIL3nUrXuVeQRABoOBM2QscKYecsyz7JCyAIOHzxHsdWB1cXFhi8XCptOpffnll1YUhQ2HQ38xqA6CYNzJs8d/eHcPfeTdMrB1MKMxxQlfR2aA7mKx8PoBhU6nY7PZzBqN40sT0R8+wKyArivAxwD+drtt8/nc13owSFqv194vZRd1DUGjcVgUju8ysMzz3Ad3AK4GXvTFwFbBA/9CV8Q45M0AkvbqoJEYhT9rqpbGHmIS8iGu6gNAjE1xEBZBhjam8rySMDJh5JMx8uHFx5kdt5FPGJkw8lMwEozo9/s2Go0SRj6Uz4GRj5VPfuDi5oCGPuFpZ5V9UYaKRawYOdfo0y+dUIZOF7/xNIvxE/ipi+MEFT5zvQrMzLyNOLc+ILD48fr6unIuDkD9ZVm6o+BQtItzzY5sFWCAI9NnzUHnN2SlhgDA8hATWUrOXy6XFdYKGWE8HKeN6/XamUy2c4WB4t5cp9O62jYWgmLQgA2BiyDBlrB8pw86+GBR7mQysSzLfMeju7s72+/3Np/P7Sc/+YkHJXKuAT2cnAHG2dlZhaFloSmAdXd35zLa7Xa+qFQHLgR09YNXr155Lj+LvRl0sCUu/oM+8jz3rYrpOy9i5D0l3377resUJhMW5+Liwtrtti/cZj2CLk7udrs2m80cSJbLpQ0GA88tz7LM39OC/eFD2Lqyq8jl+++/91gA88RWwJeXl64DZdApsIa3t7e2Xq9tOBxar9ersPqNRsPG47Gz/fjqYDBweaInwFsHiv1+36bTqbedrYIvLy899YW4hL/AtOHHxB/6ooyY2XH9AaBC3GEQpLEHGcByYwcK4sQc9U+NDTHgE4+IE5HV5TP3pGj8U7ZOUyTibEoqTy8JIxNGPgcjd/vD8U2zbf/3//5/VgZ4MQUqs8z2pa55a1hZ7m2321ueH/S6+h+sm66+IFpnRNWu8jy3zc82ZpbZfr+rDH4552BzDcvzTOKGmVlp2y3x77g+qtyXtt0dX4590I9ZWR7TPc2oexd8JLey3Hv9+31peX7UGb838oaVZrbfV/ulM4kHLGn6fQ/3Ky3LDrIzO64jImVz+T+Wjndm9tAXHhwys9Isy3WH0cxoflnqOr7cNpvCZaAy5zMxIstyMysrthOJBrVF9K22jlz2+9KKweShQcfdQxNGfh6M1P7G8iIphToNrIxQZJyUjTI7TskTsJXBIs8XBaJgAiGDN2V9uBeCbDQa/qSPEmKw0SdrVQoGAIOk+cRv3761VqvlO/hwPAYzdSYGz9qGwWBg6/Xa1uu1sztm5oZNSoYGC7PjCzF54GD3HGUlaYOy/mbm8kGex6B1ZAGYgqbu+Xxuu93OnWiz2dibN2/s/v7erq+vf8SQwbTRV7aYLcvDgmuYiclkUnnh4mZz2Omp2+3afD6vLMhGtzyQdbtdm06n9vXXXzvAIRd2TIKNYnEr98DZLi4u/N0zDDrIQ9/v966v6XRaYZGRBYMA7JMHMZjNPM+9H7A4bPXc7/d9gTHvu2AqX6fh2R0JxpUUC+736tUrtx+YVwYqPIgxMNvtdnZ1dWWTycT+67/+y0ajkf30pz+tsGEsGMe2o+3t93tPbQGcaZsOAGHLsHuYWmxJWXHSOYbDoX8m/9zswPBNJhOvp9vtVkAL5piHeR4SkXO/36+wcchrs9nYt99+a1988YX7XZ7nvsMTgV19C6ZSg7EO+BQ46Z/GStqMTqkfEgCbZ/DINfqQCjhwLjYIa4nfcb0OwgE5jikrB1AokBMjtB2pPK8kjEwY+RyMnN/fH9qR5fb2/OefboDDj7gLP2MlAAAgAElEQVSm9+m3/W9Tzv7QDXi5kjDy82PkY+VF6EoEQkc12MF8KWPnU+cPgycGngzcUAwDOJ1K5BxlWjCg1WpVUSyDPIyB/zowBOB0BoY0BMARlpEdXJiSNzM/V9MP6INOX+rTOlO+5M3u93tn9mkfebUMoOkLBkN7maIHLMhnNjPfiUcBJc9zlxd/9/f3ngpBHeRVb7db32WH/GLSGXRgDuuy2+1cPoDWfD73Rb9/8id/4jMfWZb5GoAsyxywAS3kDkAp27FYLGy9XttoNLKyLB2wYFizLLObmxtrt9s2Ho8dFIuicNvAeZELKQecn2WH2R7sigdBbI/ZJWychw5YHOxiu936g5PZ8aWJahNM2TPLpouWi6Kwq6srtyt2T7q/v7dWq2VfffWV3d3dVRaP0n6m7Klrv997/9SXsIHxeGzT6dTy/LgbEA+p2Af9YMDIA/BisXB9XF5eWq/Xs/l8boPBwF9qiU2gQ3xQdUJqRL/f95k2mD3iAEz4YrFwRpxr0elkMnE/Wa1WnvJAmgTtx6eZ3US3zOrRTmIbumbAh9z1HS5m5nYR0/Doi85MMNgibmIngIqmm1FHZOj1uLJ0Git10A/JACuojCt11zGGqTy/JIxMGPlUjPzZT39q/8f/+38dtk8fjayR55ZlueWN3FbLlXU6bbed5XL5kBrYsSzPrN/r2f391Pb7nXW7PVuvV3Z+fvGgk8MszWq1tn6/9zAg3TqBRcbJYZBaPmQVTGy7O+g2z9ip9BD31uv1wz379u7dD9ZstqzbPbz4ebff26A/sG63Y9vtzprNhhWbzUGnrbb3f7s9HFsul7Zare3VqwsrzWxTHMjM+Xx2iGXbrVlp1uv3bL/be1sPNnWwx31Z2m67tWazZfv9znq9vm22G7s4v7Bms2k3tzfWyBvWajUfiMvDYLzT7dhquZK0wMJ+9rOf2fXNjWWW2Xa7sSzPbbvZ2t3drZ2NxzabzSzPHh6UrbTtZutkYZaBkVkFI6+u3ttisXzAyL29enVpd/d3Np/PbTgc2nw2t9VqaZvt1tqtlrXbHVsuF2H2K/e43el2rN/r2939nS3mC+sP+mblYYbvQG7sH94tNvfUz1F2fNl0wsjPg5GPlU9+4OKpkc4z6KXjCF+fFnlqpTMITQdUu93OLi8vrdE45IayYJGtRKkPICN4I3Rd8KpgpzndOivCQBnD6na7dnd3Z7vdzrcaZXvJ4XDoAdvMfDYDxatBsGvO+/fvfTDNVqk8KACQ2+3WA7jZEYCRE4bKQwVOwoATufT7/cruMsPh0C4uLuz6+tpubm5cVrSTN7NjqDjBbDazsix9Ia3Z8X0STD0TpGBI5vO5LRYLl5uZeYogW4+ioyzLfEZlPB77gl76Rwoezs/7RAi2rVbLAwQDb3bwIW8aBodBAc5Pf29ubszMfJ1Ap9Oxm5sbazQO+e95nvuD1WAwOATGh1krHgyw/cvLS9vtdj4QIB+a3HLYUH3A0Rki2COCE4WdsFarlfeFh6PlcmmvX7/2mS+z4+5XzN4xgAPof/Ob31iz2fT3rqCXs7Mz7wvMDay0pgLRJlIJNpuNb+GrgzACFD6V53llG2BshwGTzm7yrpHBYOCywo7I84dp06C83W59O2C2yKUN7XbbZzs5Tp+Wy6W3kf5in8hO04iwPw26+iDJjAGDW61LZ/cgABiAFkXhM8AM2GDYOaYpHbQV5
hC9Af5m5jn++Bzn0wZl/OIsRoxDLH5O5eklYWTCyOdi5Hkxs/lqbuOOOd6Nx2NrtBq2Wtx4//aNvS3yjfVah3hfXN3Ym27Xil1htlzZsCytcbO2sZntlwcC8rzVsmJ6GORv9hsrH9IhV/uVbfdba7QPM42tvGWd4t7evXtnLTPHsSzLbPb2vXUfMLK33Vtma9uu5nY5vLRsciDaNqtbG7VGDy9y3tpmv7X+F2cP8fOgk/lmZdNiapOG2bZT2ig7rEdbF2tb3i2t/WA3vQf928ysledWFoX1mseNFXhQv7+/9wf0xvrevhiPrbz73l6/fm29xtaKYmHZNnNcXK/X1lg2LH+Is43dzia9nl3/6//nGLlarXw8MWjuzObX1lgd0k6b+4XPbDYf1lOZmbUeZnybWdOKVWHNommv871dF/fWnR/Izv7MbHlzZbvl0rLFjV12u7Yu17barMw2Zu1t2zoPZOePMPLu2rZXWxv/yZ9Ya7ew726+s+aq5y+75iFpMBjYMMtsv1paa9eyVrtt27JMGPkZMfIxUvJFHrhYuIcwEbzZkcGho/yWZZlNp1Nfh8JCSWXk5/O5r9VBgff395U0AJ3Sg8XSVAWCuj7lwtLRLn3Dtz5ZY2Q6vcp5BHIGegzMsyzz4zBXBHUGjew8tVwu7erqygfq9JOpUxaJan59p3NgkLSdrDm6uLiobCKAUU+nUxuNRnZ5eVl5CidXl/VNu93OZrPZgb2RKVJYS1L/MDrYDwbSOJumpcEisIB1s9lUFnjCtKxWKzs7O3NwwqZYxNvpdHyND2DVbDbt5z//eWVXncFgYNfX174odzKZHALlYGDj8djm87nvWmRmntbIbNFoNPIAwAMSqS66FS+yL8vD4tr37997/7///nsPVqT9xN1+OI48CSxm5vJutVr25s0bWy6Xdnd359P6u93OU20ajYbN53Pr9XqerqnMOQ+XvV7PhsOh/fDDD5VFvNgefoWdEoCVwdHgiy0SA169euUDlOVy6e8XwadJb1ImyqyaQsCMI+u3vvvuO99+mGBMXfjvYDCw2WzmtswDOrtoETh1RpFreTjH1zmP2UhSuNjxi/N0VgGbw050MMx3nXkkDmL3rPHTmQ0GggwqGTjTdmXnkKHGPM21xx+Jx/SdazSFlbbD9OliZcpTWLxUqiVhZMLIhJEJIxNG/nFg5GPlkx+4mLHgqQ4n16c8gjTHuIYdcBCUmXlgw3l1V56404hO+TFl2mgcNjoAXAhMsIT6hI0RDAYDOzs7s81mY4vForJGBmYNxhG2CUURHOgTyl4ul842wtTt93sbDod2c3NjFxcXHsD1BYqz2cza7banTiCPPM+dQcQYCObMUvA0v91u/Q3k9Afm8auvvrLpdGrz+dxT+AaDgW/wYHZg6H744Qdrt9suI1jQxWLhMz7oWvUNMNM3Zj8AqPF47OmEGD0MMO+4wFHW67UHezZQYLBBwCCgbbdbu729td1uZ19//XUF2KgLXeu7ONrtto1GI2u32/bu3Tvb7/f2+vVrD67MaBFQmCUClIuisMViYdvt1t9FUpalp9b98MMPlXRB2k16Sp7ntlwuffMPmGre2/Hq1Svf9GK9Xtt0OrXhcGjD4dB1cHt7a5eXlzYej51xRo7Y/WQy8W1eAcrtdmvT6dTKsqwMYNhYAx3iZwzyNHWPwHN9fe222Ov17He/+50DM35IIOM+sN6wYJpPjS/p7lsMHM2OazjYAhhbI9WBwQygeHt76+wy/WeQwuCMQSpsM/cirgA82Cf3wueYgVTA1JQH/Av2nlhIypEOThVkdYCFz3MO8RAfZNCnzB56BCzQBTLVGQf8Bn8H9PQ+qTyvJIxMGJkwMmFkwsg/DozknnXlRTbN0MBGUNPFrxhIbIgGSaYXy7L0dAR2mFP2g86jCAJ+u932p3qO4ZT65IyzMyXJdphMletuS3me2/n5uTtRo3F4L8Pbt289X/zVq1d2dXVVUQT18F9T6JrNpn355ZeVdTIKfOSkAwhmx4W97CIDY8cUM44FIGdZZtfX125ITDNfXV3Z+fl55f0FbFrBVC1OTdoGwUOn7pvN47tKYCbYtAEWBkYKBnM4HNr9/b1Pz+sb20nBwFZUZo3G4W318/khD/ns7MzvDbjf3NzYcrm0V69eef1m5jsXwox89913NhwOK9vHsuGG2QEIz8/PHfzu7u783SUELu6H7YxGI5dlo9GogA66geEh9xl2ldxg7A3bJ8hST7fb9XQ/Ahdte//+vQP8119/bWZWWStFkND3p9zd3dnt7a0zbtgMqZS6xoPBDumdBCsYZdit29tbm0wm1ul07O7uztN05vO5Dwqm06k1m4fNMbAp1n50Oh3fiXEwGNgPP/zgMuD9LpPJxNMd8C/WdbA2Rdlz4gn+ix/BMBJj2DyFQR11MwjEDkllAEiVpadNsKgw9cQe9EycRD8cB5wAGV1rg3/zO9dqeho2pMAKm3lcW1CNv5piEe0Q3ersBPaXyvNKwsiEkQkjE0YmjPzjwMjHyic/cNEZfdIn2CNobQwBlanwLMsqa2x4UR95uPp0T7DkqRtWjjYwZchTMouBUQpToCgXsAHs2ApTc9gJ8mxkkGWZffHFF/5uC9INGo2GByhdWKzpJGVZ2vv37+2rr76yV69eWZZl9v79e2cDJ5OJB1ReJkhbYd8wlOvrazs/P7cvv/zSzMyZJ/qhC7EJZrB1+/3eAyR1mpkHVl1vA1vF2qfhcOh9QY+NRsNz0VnfBKMKqNMWmIOyLH2DBgIaoDefz/16wJN0EGzBzOybb76xP/3TP/VNGRh0vH371n7xi1/Y+/fvnd0hkHLvm5sbm0wmnkKx3R7XaJEWQX7zzc2Np8sAvmbm6SgXFxfONDUaDXv79q31+327vb2129vbii02Gg2/jjSh/X7vgcjM7P7+3uW73W5tNpvZaDSyd+/e+QJn1jlh6+jx8vLS1wjQb+7farVcP2V5eFHlYDDwtIHValVZkwZQADCwSEyzn5+f22Kx8MHCV199Ze/evbPb21t/6aWCIsCYZYfNPAi8eZ47A4qO8KGyLO38/NzX6lEnvgZYKFDDgusAju2aGbwSbFutll1fX7tda7oDA0b6jVxgdRnkjMdjH0gxuCINjHQNwJf4hU8zsNBNCLSNuu4DENHADuNJm7XPgCMgxL2oRxlK6uBc1iiScgOYpRmu55eEkQkjE0YmjEwY+ceBkY+VF0nIh/EhUGEMBA+YFtIems1mZWMAjAa2A2AyO+5MUml0fszNRICca2YuiAhEbIKgwUun6nF2lDUajX6UpkAghs2AeTA7BoHRaGTj8djZHViJ4XBo3W7Xvv/+e39q1sDPFC/b3yrzQNBeLBaeX10UhU2nU1ssFh74JpOJvXnzxqdO7+7u3MgBP/5/8cUXzriwK5PKkDe1M206Go0qL/7r9Xo2Ho99Uwsz87YjR9oKe0T6Bgu7W62W5zLDSO52O1ssFh5gAGMAGscbDAYedLRdrGP48ssv3dFhGJE1LzUkjQPnL4rCc7oXi4XvDkj6BFPMBMT1em3fffed
B4zIgpCOgYzPzs78XAZE+AepK6RW0FbYJNjGPD++qLDVavmGFaxDgF3iPVuALLbAoImBFi/iHA6Hvvsj7YMNAmh1LcPV1ZX7IWk4bGyC78NEYht5ntvd3Z2vB7i4uPDBCJuA6CCPgWWWZXZ7e2uj0cjMzK/B9wj6+AzH2ISD2QP6NplM/NUBulYAWROMiTc6OwCYjsdje/PmjTN0yIfBKT6tAZsATkAnHSH6H+DG2gRsQ4GE2KcsIHFFB3qkzHAN//lMncraKbgAPMoupvK8kjAyYWTCyISRCSP/+2PkY+VF6Mr9fl/ZwYRjmibB1C3TikyTYmCkReDMpCZQL84JUwdAAGQIMc8PC3NZoApDggMRlGgjAczsmIeK0tnRyMwcEBaLhTMV3333nQMkOacwjDqlz/QsaQHIBaPWNALkQyoAyoZ9BJD4g/nAaM7Ozuzdu3ceAF+/fu3MA7LCeDVnmCBDOglgzwJr2FQWM9/f33twp25NN8IACVDsFEXQP2yTemUXFxduF2y5HtMvCGikuwCc5D6PRiNrNpu+hmG5XFam70k1gYHDjugnwZLc4kaj4awiThwZSMBEbfnNmzeeyw6wYN+73c5f3Gtmns6iudCwJGZWGSwBFMj/+vragxnXwsCx66L6oZlVdh6CJYPZxdbb7XZlZ0AGSwRpmGFe8kmud7vdttvbW7u/v/e0AuyR/psdAQDbnE6nnhcPuwXzxgYDgBb31WCJTtCZMsoEWFKYYPhg92BrYfEAFII816BzBqj4yXw+dznA0GOn2h4GPrB4mppHHGCwBcNGrFMA2Gw2Pjhmh0ZNlcD32Ipf+42NIGdtI/WorRFvdGExKVUppfDjSsLIhJEJIxNGJoz874+Rj5UXWcOlU5tMZTLtj/IRNAoFDPgddkkNud1u2/39vQdNAqqZeQ4zSoaNaDabdn5+bt9++60LYDKZVKbpCeQIWBVMkGEbWKaNefJn2vXy8tJGo5HvIgQrRhDRtANYQdIuAAqC93g8doWxowwBXJnl1WrlQQCGJM+PC0qLorD37997ELy8vHR28P7+3kGXgA0o6xM9xtxqtVzuyG23O+T3Xl9f+7QxDkEfcZ48zytTzJeXl754li15zY4gfX9/70CNE+k6A9qw2Wzs5ubG2VHYEuyF9zaxqJVBCVO/ZuaLdFm7AOPEwEDXRHAtIEsKDMEPO+Y9U69fv7a3b99almU2mUzst7/9rY3HY9tsNm7XyJSgS6BAbzoo4x6bzcbTaWA9Ybvxiel0aqvVys7Pz32g1mgct0JG57DSk8nEg7UOqOgjQIkNT6dT97Esy/xFxTDzjUbD1zWQ8nNzc+O2ExezKzuYZZkvhmcr3pgWcXl56XahgdnMPChijxp38C30p0DLehBsmDpg83RwyUCRGAaLBxNGWhL3pk4Nyjpjwf3R6WAw8BhBG2CUiZ2ApbLMpMyovyFzAJJ+YCvRRwFeXSeksieVBBmk8vSSMDJhZMLIhJEJI/84MPKxWa4XmeGCgUA46nAEBDqA8NTACJYoAeYLoSI0s+PWtcrYcS7sFmwZaQYsRsRwUShOTduXy6ULGYWU5SFHnPxXXsY6n8/tz/7sz3xxqLJ3WZZ5v1BCnufOTlGvBkFYOcCKQG9mdn5+7qxRURT+LgZlz3AQnB/w4+melzJ2Oh2fRgcEYY1wPt6pQvups9U6vOyRnWZYRAmrQz46aRoKhjgXAXQ8HjsARzkNBgMHX+on1YHceAwfO4JhG4/HdnV15b+RrpFlmQdfZUfoG7bR6/Xs3bt3PnAhUPD+GV7ua2ZuczBj//N//k9PB2LRsTrxcDj09pGCQoDDofEP5K4+AvPGi4thCbHfsix9G2mCKHK7u7vztAqA6Ouvv7b1eu1sL77H9tPb7dbZc4LO/8/emy3JkR3X2isi56ycxxpQQIMtyiTe6FZ6OMn6mKiXOI+jm99MN5JJZuxDgmyMNeQ8z9N/Efy8PMEWRLChm0aEGQxAVWYMe7v78lh7uW/m5XA4WCBjPvF3xqBQKGg+n9vnYShJ0nxiyP1zHfw0lUqpXC5b0Oc+kZMgwyFBkmSa/9PpZGzq6XQ6A0dJZju73c7azAI2Phn2UhgAhmdCfpFOpy0G+DkBwEmWiYOcDzaQGEnM8zHTA48/D1IRWEzs2cdSVgOwK5JqD8bEGn9/3APJCFp3bDU+Pu+IMTLGyBgjY4yMMfLnj5Gshv3Y8UX24fIbw/ng4ZfPk8mk6T8Jzhg2rBCBxmuRKVD0A+iXvAkax2PUDtVLLDCi4/Foes/pdGr3g4EzcQyevx5aaoxwPp9bsSfsFsV7LP0iNfAsGcBFkKZ9KRpUJCEEST7PZ/2bP8XC/CGY0rEJA5JkARinzmaztmzvAxmOk81mlc/nNRwODQCYA3TTXAPQ5v+JRML2wUCiArMFG8KYJJNPbU9hkVgypoATFpN9QmC2ABlJJhPgvATQMAxNF57NZv+4k320Fwm2Anh9vFmypDO2hGdkfpDYrNdru6/r62s9Pj5qvV6rXC6bnIQNlZEJUAwsRcXTxWLRgjzBz88L4JfNZjWdTi2Y4UMcMMLUY8AYMmbb7dZsNZ2O9sb44YcfDDjwP+Q3JIgwcwRAlutJvAAt6jV2u52azaYuLi6MFaR98263s4CHHeGL+/3e2FSeq1gsmiTBs1YwqYwZvoHvkfgATB6ckdtMJpOzQM05ZrOZpCeNNnOBjRBQKXj2SS0rCCQnjBvsIffiE0V8GFDy0hFszidN/vs8H2Po/ZJz+5oJH599ss5z8m/P0Hk5BiAbH593xBgZYyT/jzEyxsgYI3/eGPmp44tICk+nk+1N4H/G3/6heSv3gcUDAwbnHwpg4W2Uif/49/l83jb8gwE6Ho9WMMvEco9+4nFaWBda0gKWOC+AOJvN9PDwoKurKwNAgJFiShwAI/VLo4lE1MqVJWdJxu4R9Anu/B9W6OLiQp1O58z4NpuNarXaWZtXAh/jgD6f702nU3Mu7smzZbTsxOna7bYxFDgW8wQYAkJop70uHdYKx8rlctbCFoaEYIexe7ZkOp1ae1L02Ywjwc8nEgRrxhiWDcaWAA2zAxMnyeaQgFAsFq3dqyQrWkX3nEgkbD+Wer2u0WikzWZjAYZEZT6fW/DxDJ5nEFkKB7xItnBs7MUztel02toZkwDgA9Qt4GfINhaLhe1NMhwOjTWXnvaTQEYCKOOnzBXJz2azMc02BemMMfaDBIhx8JsvMu8EVsAfsEWq4QMlrC8AijyA8T2dnpb8+TmJEd9PJpO2v9DpdLI59SsNrASQGBKkkfqQTEqy1QtWDnzM8nPm2Wn8kbknbsDWMyc8n5fWeBDAZhhvxgEA8dIIXp6wPX8Nzskz8dycMz4+74gxMsbIGCNjjIwx8uvAyE8dP5muxGj8kq0fcNiwj9kmSWcPxORhFIAMb+Ocg+/BJvA7H/QoluW8DILXufN9z4b4t9xEImEadc8qsuRIIIOZwVB84Oc6gB3sYq1WM60xBoAB9no9SU+tgb3khKCMc/A52BmWtwmYjCdsHsC22+3OCgVhSZFrnE5
R0dSKfTCAaDYlCMRqOof5xkNLJUaegy0NPTA4tloGBlOByW90bw/XCipVIpdHd3izGju4beDYZqC5+XihxJju43VF8YaMp3ro9dKCgokMnS29srxQqdTuegMeBwONDb2yspSvnOeA8eiVMl4335vHRHSaVSsmgh+erHYSqVQkFBgag1+v6Ox+ODAr3j8bgULNQf9bN+Bw0cYx5I/CQwqrterxeRSETefTKZRDqdlud3OBwyJrhIocEKh8O44IILZN75/X40NDTA6XQiGAxKxjBgwChSAeL/02BzrhcXF6OxsVHcJdi3LM5JdxwuMkh+drsd4XAYyWRSshgZDAYUFRWhsbFR7s9FD79PhZiuWVTrgsGgKOL0x2cK6lwuh0AgAKPRKOoxF4305+d4Zh9T7aNNonFPJpPo7u6Wtgx9No4huopxgUJ/c7pR6W0ixxkXX3QzIYnRVvKdklwJuntwvtHW6BcvfGd03+CYpLIHDCxQONbYfoWTh+JIxZGA4kjFkYojzweOHA4j4lKody/QH6txR07Dyx0xlRdOED4MFRsaaE4YHqFzx0wlg2TA66dSKbS3t8tOk5PRaDQiGAwiEomIsaf6QP/bdDotaT7p0kB3Abox0GByR03FymKxCPlw4ND3nMfyXq8XNttANfqysjLs2bMH/f39QlAkX7vdLll7mC7TaBwIrK2pqUFLS4sMFE3T0NHRIZXk6YLBCUiCDIVC0DQNZWVlsNvt6OzsFEPL98VjXY/HIxmG6CfNNhgMBpnoJCmm/dVnaGEQNtUiGlAGVfN90ZDS4Hd1dQGAZEPipKKqSENCdxIuHpgeVq+E8dn4fb5f+hdzIcL36vF4kMlkEIvF0NvbKy4hHIfA0QJ68Xgcfr9fxg0NFccqDRXHDwBEIhEcPHgQNptN3CSKiopkbNBtgUaEKgvdKJgxSdMGAmrpC81Ad5fLhVgsJu5CenVb34clJSWw2+1obGwUoxoMBsUlIRAISIYnujlUVlYiFotJtXsaGbaXRpdjNhKJYMKECaLCMZ0uXW4YKM96LnQXiUQi8Pv9iEajsFqtqKyslPkFQPqZ/tXRaHSQ+4JeaaU96OjoQElJyaAFLg0v7U5LS4sofVxIcNFDNxW2l5sOqnNcgJEouajgvKebjtlslkxOvAZPJTif9H7oNPK0DZnM0QKYdMPhe6XdpfsSg6+pPAKQuAP2D+0e3SL0xMa/sb0Gg0HmOduhcPJQHKk4UnGk4kjFkecHRw6HETnhYnYhNo43pdHm7pHKAI9zSTS8DicnH8BoNMrgNRgMg47K6UphNpsxevRoCTzlzhsASkpKZGfN63PHSgNhNpulo/jy6eOZzWYRDAaFmEgwVAb5vHwJJCgaX6oW/C4H2JEjR6RYXXl5OUKhEIxGI8aOHYu+vj50dXXJIOJEYkYf+hXTb17TNBw+fBgVFRUoKiqSjDWBQAB9fX2iYjGYlEoGB2EuN5BFKB6PS2V1/aQkOBipitBXl8addU7oYsGJz75n9iUOdA7Wjo4OUf1cLpcQC5UcTiAOcC466OPP/u/s7ERRURFCoZAojvQfZ0FEZvyhomgymSRTEOMYGHzKdtOw0V2Dbi9UQTneOeY5Gfl7TdNkQo8ePVrcVeifzkxUVAtzuYE0zSS94uJiZLNZUQG7u7sRCASEROjTTb9uGjoSWTabFZXzyJEjyGQyaGxsFII1mUwIBoM4fPiwZIzinKioqJCUx2VlZTCZTJIRqr29XRaLnB/hcFjGfyKRgNvtFneRZDIJr9c7KIaFBp5qI6/J7Gh0iXA6nejq6oKmaRJA73K5UFRUJP77JMN0Oi1jk+p5S0sLSkpKpF6LnoB4QkCbwZgQjmO+Wy6m0uk02traxKgXFBSI7UulBjJ9MZscA/ypkNPYk9D47CQHqo/0+6d7Be8NQEhP71bFBT2VX9pUxhdwAaJPU0yCyuePZnwigZFMLRaLjGt+l/dXOHkojlQcqThScaTiyPODI9mOY2FETrh49Ks/LmWwLTCwy+dRInfFVHroG0rC0TRNdo36z/C6BoNBBqR+EUBDyHSb7Kyuri7U19fLYO7q6hrkD55MJhGJRGRw2Gw2MRj5fB6RSATjxo1DPp9HS0uLqDrcdfMaDQ0NKCwsRE1NDbLZrBST5GSiUkjyjcfj6OzshNVqRUFBgbShublZVD/6clPJiEaj0jaXyyXH/u3t7YjH4ygqKkJbW5v4vtKAZzIZ9PT0oKKiQgYYYwTcbjd6enqQSCSQTCbR3Nw8iOzpegFACB2ABIpS/SEpsR/j8bgMeA5AxgtQMaTbhc/nkwnHoFS61uTzA/ViUqmUZL4Kh8NSG8ZisYgi2N3dDa/XK8+r973leLJaB+phsF1UuDRNkzS+Ho8H+Xx+UBB3QUEBwuEwUqmUkAoXFuwfAKIS8nibvs0AxDBTvWTwKlVFs9ksCwCOb2BAUWJhwmAwKK4iudxAgCn7mnOOClkymURXVxeKi4uF0OhWwcVER0fHoMruDKqne47RaERHRwc8Ho/UwgkGgwAg/twMZqU6ZjAYpO+oMPL5qbjq3XLi8bgc5VN9DIVCiMViKCkpgdPplAKlBQUFEiuhL1jJRUIymURLSws0TZOFk9VqFdWbBp8+5Vw00deebdGrdsBRlwH9uLbb7UgkEhKQ7/P5RN3ngognDVRXqTzSzYexByRYo3EgroUnBZlMRjKWcSFHO0gFkmOSsQyMA+CCQh9IThvNGBO6b+hdOfRjWp9KW/95hZOH4kjFkYojFUcqjjw/OFLvRjwUmt6H/LOgsrIyv2bNmkFHgPzhYOKDUYU71rGbnmj0fuP016SSxPZyR8odMR+aHctBRMPKbD083qSySEWRR+E0+vRHNZkGCg9y8FosFvG/5n17e3sRjUbhcrng9/sHGeBMZiANqNlsHhQY2dPTI6RrNBql/kFvb68YP6o5LpcLpaWlSCaTUpySWXEMhoEsTDabDaNHj0ZLSwvGjx+PtrY2IVoOGr/fD7vdjmg0KsqL1WoV1waqayRs/QIhm82KQql/J/pgYRpF4KiSy/dts9kkswyJgP/Wq6A0SLwfB3A+n5caFfn80exOnKRG40BtBbaf7zOXO1q/Q99n/KHxo2uO3i+eR+fM6sTJzTHHY2q2h+OFSrXRaBT/bR6Ns5/1ag2JjosvLjqo8HDik4hk8mpH/Y6pxgGQNK10MWFQJ/tB78rE5+eijsG5XCQxuJSuDel0Woy2/jm4eKOSpo9P4Xii8aPaT7cQ+j739vaKewjnH0mTY4WuNOl0GrFYTJ6P96HqBhwNsCah0aecizW+T44dziW2k4sMuhNRMeY1+NxUH9n3emWci1IuYuhzPtRXne+C9lLvMkE1Ta/0cbzy32yvfm5omiYxNjwpoU3WtyGdTsuCj/dlnAvHC/+Wy+XwjW9844/5fL72ZPhBQXGk4kjFkYojFUeeLxy5du1aNDQ0HDOY67Q3XLW1tflt27YdvaB21Jf3VKBX9BQUFBQUvpgwGAxqw3UKUBypoKCgcH5g+vTp+OCDD4654Tptl0IqLVQxuAvUqxp6
6P9NBYi7T72CNxI4VYLiZ/XPoW/zsZ5n6HdHqu36a57oukPbdTYQ8/HaOPQ5h35mJPv3z4HhxsypXAM48RjQQ9+/n/X+Q8ff8eb0Z4X+WiczFo7XvuONmZNp58nOkxMtks/E/B/p6+uVvRNdayTf8/kOxZGDv6s48uSgOPLUrgEojjxe+xRHnvw1Tpcjh+uvEUmaQforhpQAABkySURBVMMbiUSwa9cu5PMDmYSqq6vh8/nQ2dmJ+vp6lJeXo6KiYlAcQDgcxscffwwAGDduHEpLS4/7kCdjJId+5lhkdrxr0F0AOHrseaxrHOv7PHo/FZxJw3g2GN2TaePZ8BzDYSTa/1n76XTvPdSA6slEf20e03MhONyCS/8zNFZF/zn9onK4+TfUMOrbeqy2HMtI0i1C/3Oy4P30ftv6hejQPhvu2voAfP0z6f92sm3kQp2fZTv019XbboUzB8WRA1AceepQHDly11AcqThyaLvOBEcOd98RKXzMRn788cf42te+BgDwer2YNWsWVq1ahX379uHf/u3fsHLlStx8882SjrS7uxvf/e538fLLL8NoNKKmpgaPP/44xo8fP8gf/VgDQv+C9INz6GAFBr+IoW3Wo6urC5s3b4bVasW8efOk7sOxnvVYbdD/fqiKqYf+O2e7sVQ4t8GxyyDrZDIJl8slWY6y2YEq8qzzQl/x482bdDotgby8TiZztMghv2e1WiWm5FjIZo9Wqrfb7YN8+xmDwsxnvAb9xZlNi+2JRCISKK0Pjj6ZvtG0AV//rq4uZLNZSV/MNuqDw/VFQ4cuePULWf6NwcTZbFaKkzIOZzhVmzZHH0/T398v2ZhIIgyoH/p9hZGH4kjFkQrnJhRHDt83iiOPYkQ2XEQ2m0UoFMLUqVMxc+ZMPP/888jn8/jSl74kGXa4qzQYDHj++efxzDPP4LrrrsO4ceOwZs0a1NTU4JFHHpEsPqFQCGazGUVFRZK1hLUUfD4f3G43Ojs7JT2owWBAc3MzNE1DeXk5+vv70dbWhnw+j+LiYkkn2dnZKVXr8/mBYMwdO3bg0UcflcDeqVOnSu0HPfL5gcxMDOItLi6WQRUOh6U+g81mQ1dX16Dg20QigWAwKLUnFKEofFGhJ4Bdu3bhlVdeQVNTEyZOnIjrr78excXFePfdd/Hyyy8jkUjgy1/+Mq6//noUFBQMGte8Tn9/P3bs2IFXX30VHR0dmDJlCpYtW4ZIJILXX39dMjWlUinU1tZiyZIl0hb9gi2bzWL//v3YvHmz3HfOnDnQNA2bNm3CW2+9hd7eXixYsABLliyRFMgdHR14/fXXsWzZMhQWFiKZTGLTpk149dVXYTAYMGPGDCxfvlzIYLh5ybakUils2bIF69evRzabxZQpU/DVr34VdrsdH330Ef7whz8glUrhK1/5CmbNmiXXHopwOIzXXnttUErwCy64ABdeeCE++OADvPnmmzAYDKitrcXMmTM/lZ59qCtEe3s7nnzySUyfPh3z5s3Dm2++iXfffVeKmFqtVlx66aWYNm2askF/RiiOVBypcO5AcaTiyFPBiG64mNVm3LhxuP322xGJRFBfX49gMCi1Rvi5/v5+vP766zAajVi9ejWKiorw3HPP4cCBA8jlcmhtbcUvf/lL7NmzBy6XC0uWLMGiRYuwb98+/O///i8aGhpwwQUXYMWKFdi4cSN2796NO++8E1arFQ8//DDGjRuHm266CS+88ALeeustZDIZXHzxxbj22mvR0dGBxx9/HIWFhYjH4yguLkZhYSFee+01HD58GFarFY8++ijuvvtuzJ49WzKeEAcPHsQvf/lLfPLJJygtLcXVV1+N6dOnY8eOHXj++efR1dWFWbNmYerUqdiwYQMOHz4MTdNQUFCA1tZWXHbZZbjxxhsHFQxUUPgiIp/Po6GhAT/5yU9w6NAhjBo1Cj//+c/h8Xgwf/583H///bBYLKipqcG3v/1tJJNJ3HHHHRJrQuRyORw8eBA/+MEP0NLSgrKyMvziF7+Aw+FAdXU1WltbkUgk0NzcjA0bNuD222/HkiVLJH5Fr463trbi4Ycfxu7du1FUVIT/+7//wx133AGj0YhHH30UpaWliMfj+P73v4+qqipMmDABmzdvxquvvopXXnkF8+bNQzAYxM6dO7FmzRqUlpYiGAzihz/8IXw+H6688soTGlgq9Hv27MHtt9+OsrIyjBkzBv/+7/8Oj8eDWbNm4cEHH8Thw4fhdruxYcMGrFmzBvPnz0dvby/279+PYDCI8vJymM1mHDlyBN/61rcwbdo0FBUVSQ0Zm82GNWvWSJrr1157DQ888ABmzZol9rW4uBilpaWS3SuXy2HdunV46KGHcOedd2LBggWIRqNobm6GyWTC3r17UV9fj7KyMiEThT8PFEcqjlQ4t6A48vj9ojhyMEZ0w8VjukwmA4/HgwkTJmD37t3o6ur6lN9oIpFAU1OTFFoEgNtuuw1erxfpdBpPPPEEfvazn6Gurg7bt2+XQojPPfcc3nvvPVRUVOBXv/qVHEW++OKLmDt3Lux2O55++mn88Ic/xBtvvIF7770XXq8XALBx40aUl5cjGAziN7/5DcxmMyZNmoS6ujoYDANV50mI+mNWvV9nf38/HnnkEfz2t79FXV0d3n77bTQ2NuKuu+7Cj370I+zduxdVVVX4r//6L+zZswcffPABuru7paBgOp3G7t27sWTJEhQXF49k9ysojBj083X37t3Yt28fVq5ciauvvhr/+I//iN///veoqqrCoUOH8NBDD+GrX/0qNm/ejF//+te44447PmWIqQDu378fd955Jy6++GKsXr0a7733HhYsWIAHH3wQZrMZP//5z/Huu+9i/vz5x/XD7ujowPbt2/G1r30NM2bMwL/+67/iD3/4A7LZLKxWK9asWYNcLoe/+Zu/wdtvv42qqio0NjZi586dUoQ0k8lg/fr1SCQS+NGPfoRcLodVq1bhjTfewKJFi2A2m4clFJ5APPfcc2hvb8ezzz6LiooKfPTRR1i/fj3Kyspw4MABrFq1CqNHj8ZDDz2E7du345JLLkFnZyd+8YtfYM6cObjmmmvEJcJgMGD58uXw+/0IBAKoqanB1q1bYbfbce+99yKTyeDee+/F1q1bMXv2bDQ0NODJJ5/EZZddhssvv1xS2r7zzjt48sknB6Vg/su//Etce+216O3txdq1a5FMJjFt2jQAyp3wzwnFkYojFc4NKI5UHHmqOCMR0/o89Xqy0Af56StD099yy5YtePfdd9HX14dnnnkGU6ZMwWOPPYa77roLEyZMwJ49e7Bz504sWrQI3/3ud1FdXY0NGzZg/PjxGD16NF566SX8z//8DwoLC7F06VL8/ve/RzgcRllZmbg07NixAwaDAU6nEwsXLsQjjzyCm266Cddffz3+6Z/+CWPGjMHEiRPxne98B7W1tYMUNpLghg0bcNFFF+Gxxx7DP/zDP6C0tBTvv/8+3n77bSxYsADf+973MHbsWOzevRuJRAKLFy/G5MmTMXHiRFRUVKCxsRHxeFwtdBS+8Mjn81JksrS
0FD6fD5MmTUJ9fT3S6TRGjx6Nd955B8888wySySQWL16MdDqN/v7+QT/JZFKKmFZVVaGoqAhVVVVoaGhAV1eX1OrYsGEDysvL8ZWvfEXqceivk0qlUFJSgrvvvhtLly6VehwOhwPhcBjl5eUoKChAdXU1ysrKsG3bNpjNZqxcuRJz586VehpUE/lZr9eLmpoaHDlyBKFQSJ79RNi/fz8CgQCqqqpgtVoxZ84cHDp0CFarFatXr8bChQulbo3P5xMVf8mSJfjyl78sfvKHDh1CPB7Hli1b8Oyzz2LdunUIhUK48MILcd9996GyshI7d+6E2+3GqFGjAAAlJSVYvHgxJk6cKCcMkUgE//Ef/4GioiI4HA6pn8TaIqFQCG+++SYmTZqEyspKAGrD9XlAcaTiSIVzA4ojh4fiyKMY0RMuYIAw+vr6sGPHDmzduhV+vx8lJSVSMK2lpQUWiwU2mw3jx4/H3r178cknn8Dj8eCpp57CFVdcAaPRKGTECtChUAjl5eVydJrJZKSIXFFREWpra/Hiiy+ip6cH1157Lfx+vxR3mz9/PsxmM5588klEIhHk8wMFAufMmYPp06dLZzudTqlw3t/fj6amJhQXF+PAgQPwer0YP368PCcnTDgcRmNjoxT4Y+Vt7vwNBoMUUzQYBqpcUyFUUPiiY6iCZjKZUFJSgnA4LIZx586d6OnpQSwWw+zZs/H8889j165dsNlsoujb7Xa0tbVJsUaHwwG/34/u7m4kEgkYjUY0NjZi165dWLp0KSwWC3784x8jHo8PytjmdDpxww034Otf/zr27t2LX/3qVwgGg5g3bx6efvppyTrk9Xrh9/vR1NSEXC4Hm80mxUOHPhf9tQOBAA4ePIienh6UlJScsF8ADCqoaDAYMGrUKEQiETidTvz1X/81tm/fjmeffRZVVVWYM2cOTCYTfD4frrrqKvkeMFCA8pJLLsGiRYvQ19eHW265BbW1tbjlllsQCATwu9/9DuvXr0dRUREmTZqEfD6P0tJSlJWVyXWy2Sz++7//GwcPHsSDDz6Im2++eVA6cQDYu3cvWltbcfPNN4vNUrbozwvFkYojFc4dKI48fr8AiiP1GPGkGalUCu+//z6am5sRjUaxcuVKGAwGJBIJvPTSS9i9ezeAgeJgV199Nd566y3cf//9sNvtcLlc+Nu//VtYLBbceOONWLduHb75zW8iGo3C6/Vi6tSpqK+vx6uvvoqdO3eiqakJ11xzDUaPHo3LLrsMzz33HJLJJK644goYDAbMnz8fL7zwAt555x3Y7XYkEgnU1tYin89LlWg9SkpKUFVVhffeew/3338/ZsyYgauuugrf/va3UV1djSeeeAJOpxNLlizBiy++iNtuuw1/+tOfUFVVhYULF+Lw4cPYtGkTDhw4gEgkgosuugjbtm1DKpVCX1+fqBishq2gcDZAr2JlMhm0tLSgtLQUH374Ierr6/Gd73wHY8aMQX19PZ566in8/d//PUpKSqSSPDBgLMPhMICjKngoFILf75fg+K1bt6KnpwcLFiyA2WxGMBiE0+kcNE/tdjuMRiP27duH73//+wiFQviXf/kXTJw4UYgEGEi/HQqFUFlZCZPJ9KnnYGICkkpfXx86OzsRCATg8XiknScCr0GiOnLkCAKBAAoKCrBjxw5JbnDbbbdh/Pjx0DRNskcZjUZYrVbkcjmMGTMGd911F2bMmIHW1lZYrVa0traiq6sLPT09Eouzfv167NixA5MmTUImk0EqlRL3rra2Njz11FOw2Wyor69Hf38/tm/fjjfeeAN1dXVIp9PYvHkzHA4H5syZczpDQuEzQnGk4kiFcw+KI48PxZFHMaIbroKCAixevBg2mw1lZWX4i7/4CyxevBg7d+7EvHnzxEWAO82ZM2fi4Ycfxvr16xGNRvHAAw9g4cKFMBqNuOWWW+D1evHhhx9i8uTJWLp0KaZPnw6/34/CwkIcOXIEV111FZYuXYpAIIBZs2bhuuuuQyqVwsUXXwxN03D55Zfjsccew8aNG5FOp3Hvvfdi0aJFiEaj+PrXv47JkycP2sUHg0HcfPPNCAQCaG9vR01NDdxuNyZPnoyKigrk83lYLBasXr0aFRUV2LNnDy6//HKsWLEC06ZNQyAQwEsvvYQjR45gxYoVmDFjBqqqqjB27FiUlJQgk8kgHo+jq6vruJlYFBS+SNA0DWVlZTCZTGhra0M8HsfHH3+MUaNGIZFIIBKJYMyYMZgxYwasVit27dqFefPmYe7cuYOuk8vl8MYbb2DTpk04dOgQRo8ejSNHjqCiogJ+vx/JZBIbN25EcXExJk2aBJvNhhtuuOGYqV1jsRieeOIJ9PT04Fvf+hZmzpyJXC6HiooKvP/++wiFQgiFQmhtbcWyZctgNptFSaf6bzQaMW3aNGzfvh2HDh1CNpvFxx9/jBkzZiAQCJx0/9TW1mLjxo1oaGgQ15HKykpkMhk88cQTMBqNuPvuuzF16lQhtebmZvznf/4npk+fjuXLl8NsNuPHP/6x2ERmcbPb7Vi3bh3279+Pxx9/HPPmzcOLL76IhoYGaJqGjz76CE8//TTq6uqwcOFC5HI51NTUoK+vD9u2bUM+n0dTUxMOHTqEhQsXIhwOY8uWLaipqVHuhJ8TFEcqjlQ4t6A4cngojjyKEdlwsUETJkzAD37wAxiNRlHj7HY7Zs+ejYkTJyKbzUpaRofDgWAwiGXLlmHGjBlIp9MoLi4WI1teXo5//ud/lrSyfr8fRqMRU6ZMQVVVFZLJpOTz1zQNfr8fq1evBgAEAgFomga3243rrrsOdXV1yGaz8Pv9cDqdcDgc+OY3v/mpegIGgwEzZ87EuHHjkEqlEAgEYLFYcM8990iAsKZpqK6uxq233opoNAqbzYZgMAij0YipU6di7Nix6O3thdfrhd1uR2VlJSwWi/iJ5nI5ZDIZGbBqwaPwRYQ+feqFF16IyZMnY926dXjllVfw4Ycf4r777sP48ePx7LPP4p577sHkyZPx4Ycf4oYbboDNZhO/cSKfz2PKlCmYPHkyfvrTn+KFF15Ac3MzrrzySgQCAezbtw9//OMfMXfuXBQWFoqN0JMJ586mTZuwfv16TJw4Ea+//jp+97vf4aKLLkJdXR3eeecdrF27FrFYDHa7XRaW+XweyWQSiUQCwICbw/Lly/Gb3/wG3/jGN1BaWoq+vj4sWrRI4mWGcyPgvF2xYgV+/etf46abbsKECRPQ2NiIW2+9FVu2bMFrr72GKVOm4Le//S1efvll1NXVYe7cubBYLKiqqkJhYaEQ3JQpU3DffffB7XZj//798Hq9WLBgAT766CM8/vjj8Pl8AIB4PC7+6U6nE2PGjIHf74fBYEBJSQkeeOABpNNpOS25+OKLsWzZMuTzeWzduhVHjhzB3/3d38Futyt3wj8jFEcqjlQ4t6A4UnHkqcK4du3a07rAunXr1q5atQqapsFkMsHr9cLj8cDpdMqu2WKxwOv1wufzyY/b7RY/bY/Hg4KCAvk8f6xWq1yLD81iZG63+1OV7vXF5giTyQSn0wmv1y
tpd00mE2w2G8xm86cqf9NP3ePxSPE3h8Mh2ZkIq9UKt9sNp9MpPqA8tmTbeBxKf3yr1Qq73S6F2RSRKHzRoWkanE4nqqqqoGkDdXSWLl2KZcuWYdy4cfjSl76E/v5+9PX14corr8Stt94qxVCH/rhcLlRXV4sv9/Lly3HFFVegoKAA4XAY8XgcK1aswJgxYwbNS/38zOVyqK+vh9FoRElJCVKpFFKpFMrKynDRRRehsrIS8XgcPp8PK1euxKxZs2AymWA0GhGNRuHz+bB48WKxCTU1NYhGo/B4PPirv/orXH755YPs0Ing8/kwZcoUdHZ2wmg04oYbbsBVV12FpqYmmM1mFBYWSjBzVVUVqqur4Xa7UVNTI0HEAFBTU4OysjK0tbWhpKQE99xzD2bPno3KykqYzWbs2bNHYm+uueYaOBwOeDwe1NTUYNSoUbBYLDCZTHC5XLIIfv/993HppZeirq4OACQ+4MYbb0QgEPjMtUUeeOCB1rVr16475S+ep1AcqThS4dyF4sjhcb5x5M9+9jOsWrXqgWOOldPNL19bW5vftm0bgGNnLNGrACeL431nuN8Pvf/JXmNoZ56orafathNdS0Hhiwx9IcVkMolUKgWbzSYV7rPZLBKJhKSbdTgcgxZ4Q6/DIP5MJjPoOplMBolEAg6HY1C1+WPN7WQyib6+vkG/47V4/VwuB5fLJWlgAaC3txd9fX3w+XyD6nH09PSIWqhfoJ5s3+RyOcRiMUm/zar1/f39g9pot9tht9vlO3rCYjpsJgzgYhsAEomEqI4ulwtOp1Ouqb+O3gblcjl0dnbC6XTC6XRC0zSJj3G73aJQfhYbZDAY/pjP52tP+YvnKRRHDv/7E11LQeGLDMWRJ+6b84kjp0+fjg8++OCYXxrRDZeCgsK5B72B0mcW49/0gbj6tNbHuo7+Z+hnhxrGk7kGwWuxPfrf6ZU/+qbra6gMve+pGFi2Y2gfHK+N/NuxwPbp267vF97veC4OJ7MwZpHM04HacJ0aFEcqKJzbUBw5fN+cTxw53IZrxNPCKygonFugkR5qxGi8TtY4HUvVG+76w13nWAb/RO3Rf4/31DRt0Oc/i6I+9BrDtXG4exyrf4f2y/H68GShYrYUFBQURhaKI0/cHsWRasOloKBwEjhVw3imrnOizw3392OR2OliJF2ehiPIkbqegoKCgsLIQ3HkZ2vP6V7rbOJIJXcqKCgoKCgoKCgoKCicIZx2DJemaZ0ADo9McxQUFBQUvuCozOfzhZ93I84WKI5UUFBQOG9wXH487Q2XgoKCgoKCgoKCgoKCwrGhXAoVFBQUFBQUFBQUFBTOENSGS0FBQUFBQUFBQUFB4QxBbbgUFBQUFBQUFBQUFBTOENSGS0FBQUFBQUFBQUFB4QxBbbgUFBQUFBQUFBQUFBTOENSGS0FBQUFBQUFBQUFB4QzhtDdcmqbNGomGfF7QNG2mpmnFn3c7Pgs0TZt1Frf9Ek3TCj7vdnxWaJo2+yzu+0vP8r6fe7b2PXD297/CqeFs5kjFj58fzmaOPJv5ETj7bfTZzJFne98PB1WHS0FBQUFBQUFBQUFB4QxBuRQqKCgoKCgoKCgoKCicIagNl4KCgoKCgoKCgoKCwhmC2nApKCgoKCgoKCgoKCicIagNl4KCgoKCgoKCgoKCwhmC2nApKCgoKCgoKCgoKCicIfw/kWFA/Wu0KJ0AAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {
+ "tags": [],
+ "needs_background": "light"
+ }
+ },
+ {
+ "output_type": "display_data",
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAA1wAAAFDCAYAAAAu+g+jAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOx9ebxkV1X1OvXqdaczJyQkBAIhJjIqIjP4McmMCCICn8woyCwKCEKYEUGRWWSIgIg4MAUhIHyCgAhBGQVMMAlJCEkgA+mETne/96rqfn/c2vVWrbfOrXrpF+hh7186r+rec/bZe5+99zp333NvlaZpkJSUlJSUlJSUlJSUlLTx1PtZC5CUlJSUlJSUlJSUlLS3Ul5wJSUlJSUlJSUlJSUlXUOUF1xJSUlJSUlJSUlJSUnXEOUFV1JSUlJSUlJSUlJS0jVEecGVlJSUlJSUlJSUlJR0DVFecCUlJSUlJSUlJSUlJV1DlBdcSUlJSUlJSUlJSUlJ1xDlBVfSbk2llHNLKTtKKdvo3zFXk9djSylf2GD5HltKGY7lurKU8s1Syq/N6HNwKeX1pZTvj/udPf5+xPj8uaWUi0spB1Cf3y2lfJa+N6WUb5VSenTsFaWUd2+kfklJSUlJuweVUn67lPKVMW5cVEr5RCnlVzaA77tLKa/YIBln8hrj11VjPS4opby2lLIwo09V91LKS8Y8H0rt++Njx5FcTSnlttTmhFJK/hht0k+F8oIraU+gBzRNcyD9u/BnIUQppV859aWmaQ4EcCiAtwD4h1LKoRUemwB8GsDNANwHwMEA7gDgMgC3paYLAH5/hkjHAHj43AokJSUlJe2RVEr5QwCvB/BKAEcBuD5avHngz1KuXaBbjHHzLgAeBuDxtYZz6v5jAC+dceH2YwAbcmGZlLReyguupD2OSimHlVI+Vkq5pJRy+fjz9ej8Y0sp3yul/KSUck4p5RGllJsAeCuAO4wrZFvHbTeXUl4zvtv0o1LKW0spW8bn7lpK+UEp5bmllB8CeFeXXE3TjAD8LYADAJxYafZotGDxG03T/E/TNKOmaS5umublTdN8nNr9OYBn1y7cxvRnaAGmdiGYlJSUlLSHUynlEAAvA/DUpmk+1DTNVU3TrDRN89GmaZ4zbrN5vFPiwvG/15dSNo/PBZY9a7x74qJSyuPG554I4BEA/miMjR8dHz+mlPLBMc6eU0p5xvj44WNeDxh/P7CUclYp5dE1Xl3UNM1ZAP4DwC9dXd3H9C8AlgE8smO4vwHwi6WUu8ySKylpoykvuJL2ROqhvfi5AdqLlx0A3gwA4214bwRw36ZpDgJwRwDfaJrmdABPwvhuVNM0cSHzKgA/jzbZnwDgugBeRGMdDeDw8VhP7BJqXFl7HIAVAOdVmt0DwL80TbNtho5fAfBZAM/uaPMhAFcCeOwMXklJSUlJey7dAcB+AD7c0eYFAG6PFstugXbHxEl0/mgAh6DFuN8B8JellMOapnk7gL8D8GdjbHzAeKv6RwF8c9z+VwE8s5Ry76Zpfoz2btQ7SinXBvA6tBj7HsdrlmKllBsD+D8AztoF3QGgAfBCAC8upSxW2mxHe5fsT2bJlZS00ZQXXEl7Ap1SStk6/ndK0zSXNU3zwaZptjdN8xO0yZMrViMANy+lbGma5qKmab7jmJZSCtqLqD9omubHY16vxPQ2vRGAFzdNs9Q0zY6KfLcf3zHbCeA1AB7ZNM3FlbbXAnDRnHq/CMDTSylHVs4HwLxwvFUxKSkpKWnvo2sBuLRpmkFHm0cAeNl4x8QlAF4K4FF0fmV8fmW8m2IbgBtVeN0GwJFN07ysaZrlpmm+B+AdGGNj0zSfAvB+tNvj7wfg966GTl8rpVwF4HS0xcW3VNrNozvGcv0zgEsA/G5Hs7cBuH4p5b7rEzcpadcoL7iS9gR6UNM0h47/PaiUsn8p5W2llPNKKVcC+DyAQ0spC03TXIV2P/iTAFxUSjl1XEFzdCSA/QF8NS7o0G5L4AucS5qm2TlDvtPGd8wOA/DPaKt1KKVcv9DLPsZtLwNwnXmUbprm2wA+BuB5HW0+DuAHuHqAl5SUlJS0+9NlAI6YsX38GEzvrDhvfGzCQy5atgM4sMLrBgCOoULnVgDPR/v8VNDbAdwcwLubprlsTj2Yfnk8/sMA3A7tVnyMX4YRuPkIzKc700lo7/bt5042TbME4OXjf0lJPzXKC66kPZGehbYyd7umaQ4GcOfx8QIATdN8smmae6K9sDkDbWUOaO8IMV2KdjvizeiC7pDxg7yo9KnSeJvgkwE8qpRyy6Zpvs8v+xg3+1cA9y70BsIZ9GIAT0C7raNGL0ALhvvPK2tSUlJS0h5DXwKwBOBBHW0uRHuhFHT98bF5SHHufADnEC4e2jTNQU3T3A+YbJ9/O4D3AHhKKeWEDl71QVv6J7T6vWh87L6Em3+H+XRnnv8P7fbEp3Q0exfal1w9eF5Zk5J2lfKCK2lPpIPQXihtLaUcjvaiBABQSjmqlPLA8QXNEtptE6Px6R8BuF5svxu/5OIdAF433ouOUsp1Syn3vrqCjfe3n4zp58CY/hYtmH2wlHLjUkqvlHKtUsrzSyn3M/zOAvCPAJ7RMeZnAXwbwGOurtxJSUlJSbsnNU1zBVpM+ctSSuzyWCyl3LeU8mfjZn8P4KRSypGl/YmRFwF475xD/AjA8fT9PwH8ZPzCqC2llIVSys1LKbcZn38+2gurx6N9wdN7yurbAZXXPPQqAE8opRytJ+bUXekFAP6oNtj4Tt+LATx3nXImJV1tyguupD2RXg9gC9o7VKeh3QYY1APwh2grez9G+2zXk8fnPgPgOwB+WEq5dHzsuWirYaeNtyf+K+r72tcj3/1KKb+oJ8bbGe6B9s7b/0P70ov/BHAEgC9X+L0M4+0WHXQS2pd7JCUlJSXtZdQ0zV+gxbaT0D6ndD6ApwE4ZdzkFWhftvTfAL4F4GuY/xXofw3gpvSc9BDAr6F9Acc5aLH2ZACHlFJuNZbj0eN2r0Z78fU8x2tO3b6F9tGA51TOz9Jd2/8HWlztor/H/M9TJyXtMpWmyd98S0pKSkpKSkpKSkpKuiYo73AlJSUlJSUlJSUlJSVdQ5QXXElJSUlJSUlJSUlJSdcQ5QVXUlJSUlJSUlJSUlLSNUR5wZWUlJSUlJSUlJSUlHQN0bw/JFelUkr1rRulFACAezGHO1dKmXzv6uvaz3su+CrpuHGM5ah9dny79JglQ1ebq9N2PeT4sr57G/E87c16bjTtqq1m9dfztThbz3hBmhscz664nhXrtXFr1JUPrk4erOUm5TEP70q7S5umObLaIWmKEiMTI/dkSoy8epQYObutjlujPQ0jm6axAs91wVVKuX3TNKfVzi8uLs4lmJ7r9XoopWA0GmE0GqHX660xymg0QtM06Pf7aJqmEzy6xnLgsh5nHY1GUzJzv9FohIWFhcn34NXr9SbHSikYDoeTvgsLCxiNRiilTOmpCVzl5jbcX2UPniGHs1uv17PjOvv
o2ErcL2wUtmHeLMvCwsLEXrUgDT5hSwAYDodr9IrPs3SelTiYX/DSftFOZXbyq86qWxc/5cu2WlhYmJpjnUPl78YN2WKO4pzakvvyPCjP8G/2+5rONVJ/Zx/n/sPhcCrm2IbA6vzX8oLz8+jjYpj71xYgvV5v4pu1BK9y8hjqe7W4V3tx365FtOa4mm3Zf9SHmqbBysrKeWsG2IdpFj4CiZGJkYmRNXslRnqda5QYuXtj5MrKyhreEz2qZ4hmgYkKzQKxQ6oR43wIHm3V2cNpOJHXAledoZZoOeE5Q8bYLpkxhfxN02A4HHaOy0klnE7t5mwVckQCiX8KbMFH7TocDtc4JwMYj6mk52fNay2hqY7BQ2VQfm4BEgsLDVAAExvp8fje9U9lULuzPNqvazEVCyXnvwqINbn6/dXaSMzpYDBYM9duUcS2dP6oix5NJiwXz5ee41h11GVrXVTU4lhld+cATGJR54Ptz0lVj3GSZV+Nc7PmOWR0+uoxziG8OIi2YVft53ywBvYhW8xhAEm0Z/91ccrfk6ZpPfgIJEYmRiZGKiVGrrW/2joxcs/HyF3eUhgDaEJmAYPYAbocwR1XZ4vql1bAoi8rzQbjyoSbZB6PEz47eQRzyBH8uqoJLCdXCRzVErUmXq2yuMQbejl7qs7OoVhWll2Ph7NHgmNdw1Yqu46twMVyMwjz/EffWhVSEywvFDRwdI5i/hVAVDaWS+dG/UDJJcyoTLFeYVeWXeeMv9f0Yt/nWNQqa80/HHiyH3D1qmtuGUCdTdiOKqvqqfLWfIfJ5R2OLxdLbpyFhYUJcHEeUoDmJN7v9y0wx9yGL/E8MR/1J45Ll7Nc/LPOtdhzn7vAJKlOiZGJkYmRiZGJkXs/RnbRhr00Ixxy1i24IKecKqAGV+fXJKu3d52MLIde4atRtV98jslW+aJKxrJyfz6nPPkcJ9rRaGSrb6w3kwYnJwl1XieL9uXKgrNHjMdtef65ounswrKpHpoQWZauZKfnawnX6R1BORgMpvjV2vPY0UZ9IKqNGrj6OfrwooT1Z9Bm3XgOWAa1dZx3lSBN2rqwUr61OeA+tXmPPjzH7rzKqDKFf2lcsU1YX7XFrLnkz+qfmuP0bgDbW9vF2BrnPB7bXHMBgKlqMI/pcqyzcRdIufju8v2k+SgxMjEyMTIxkvskRu5bGLkRL82wxg8ha8Gqwa4VKxcMrKAGZExOnI9zWtXRCg3zdzrwd6evO68gqm3YFi6A2elcO7avVtFYDq4iAKv7wVVflW9hYQGDwWCqAuV0rlVA9ZhWonQe2WYqH8+V2tUlHNUnbFCzM48d9uHKXbSr+TPziaqbAgf7Nh9n2XUu2Z+jQlRbbOn8aWKIPmy/Ll2c/6quzJPb1yr07IcK+I6vjslxrH7jqvaOt8sZQZpUnU+xfi7pB+/wATf/bB+2oeYn9cHgXevHOrDNXH5QH+DF8Sw7Jq2fEiMTI/V4YmRipM5hYuR0fx53b8HIDdlSCKy9ynMTz+c1MPSzU8ABUfxlA/DteWcAF0R8XCtpTm6WSZ3dgY+7+mfAYP3Z4dz+0TgeFS2tAqg95tU/xmE5+PZ3jbcDOj4XcnF/tqnycPbTc7WFAM+Znp8VDNG2K/k4wGbAY2Bh3+G/umXHJbdoXwML1V8Tg6u6uMTCD6Zz7Gqfeas4qlv4qcaIm5t5PmvC08WIJuuYC7Uh2y2IF28cl87nedERlTQH8pqzuD3PP+vDMT0L3FV/p5fbulPLjbwgd7mpa+6TuikxMjHS8U+MTIxMjNx7MLKLdvmCS52r63t8ZuW4MqHOMa8SNbm6EpSrPPF5lte10ytjN8nclvlq4KusfDXNAOcARe1bk1tl5CRfk5vbuvYuwNyYLCcnpdr2lminzs/9aiDOutfmVxOVVjlrFV9HzIP9iuXjeWf9dFuF8yMFJPVNta+eY50i3tin+LzqFLxURydPkFac1Bc1DpmXjqf+UkvUKrPy5b+8OJuVhB2xrzB4zVrARV+NIwdcNdlnyca2YR05/9QWgsFft/Q4WZLWR4mRiZHcNzEyMVJtnRi52ndvxsgNu8PFA+sVdJAaQwNAnaPWr8aLjztQm8VHeTjDA2tfFVuThcfViQ4+OoYLWMeXeTtn5sBRu6iutaCeJ9mwHEpsL5ZJk7Mmw2jPW2Y0+GclldptYhecLMto1D7Y7XxYx6/x5MB0c8B6cz8FL8e3ZuOYr1r8uW0M8bm2OHELHT7ngNKNx3Ne8xX1YfVLtaPKqeeULwOoVrNqyVN5a07h51t0y4bK5GJJ31alCxiVoZbMa34aMjoZtD2PX7PtPICWNJsSIxMjdczESKwZR+2SGJkYuSdgZBdt+AVXzfE0YQbFg5fsMOqkzlhB6hQ8bpd8akB1LL6lqeOWMl0F4XPqcF124Ilz42tf/quOobfeNdm6MeO7S8BcoegKrnm2JnBFq7aVpStZ8nGXmBRI1a4hV42/9tVkx4mTt6nEd+WhtmA5nI+7W/xdcjpd47MmSubvfKo2B5rMnd/WEk0tcdXiRcdmG3Ff9d2aLPw5tid0AaP2qX3W7+pjqg9/d3PMMauVXNaZqQbGaiMnc5fvzNI7aeMoMTIxUtslRiZGujYqS2Lkno2RG/KWwlowOaG0Db8NpuaA7NgcvNpOk6STR4NFJ73LaYP0Sp3Hr5FLbPr2GJVJvztZZjkyJxD+p7pH5UcBoivIagsH57DBX7eO1OzDidrp5249Az4RKbFdGRh0Lt3cqkxuywProDbXZNFl2xhLgV0rQdxe9VT5+bj+rVWOdT70n/Mpp4+OpQs51pHP6RuUHAiqTaO/+82TWcChcrlFATD9FiQXp7Vxw5YBMOq/2k5lmlePLjnU5jGGazcv36Q6JUYmRiZGJkYmRu7bGLlhr4XnAVVIve2tgKDKaNDprV43uTrxXUGh7eZdSHAwqwM7kFI9XSINni7YHH/WwwEP89C5cCAVCVXt6cZUqunt2qm+et7poQlcwYT5xW1hPe4WGRpMKocmDTeOtlO/47FqfsA21ASqSYZ5O7mVBye6GuAzqYxdoBeyMRDrD19qn3l8Q+2nv9nDnxlgnX1UFpc/FDAciKg9nE85G+o/tbuLJ13EuVzq7NflezymVpo1j7r8oTRvrkyqU2JkYqRrp/rq+cTIxEi1X2LknoWRG3aHq2uvak1JYPXNPEHOuWrB5wKZ+ToH1iCpJUkeP4LHAZVLKrUJBqZfO8uTzKQVoi5yVSoOcE7ICtZhn9iP7eaJdeE2NR1qCwLn0GwvpwP7hgbXwsLC1CtqtcrByVSrLEp8jPuwP7vKGMvD2xSUpyYstSVTbeGgcVIDQwcCXXHi7KDJmiuiLg6Ct3ueQIGf5XBJnHlEv9qYzMvpyxVC9cuwP+8VVx8O4n3/TlcGfvVtbqc2YL1rIO7mzIFArfLKvJyNXa5i+3eBfdL8lBiZGJkYmRiZGLn3Y2QXbcjvcPFnl+R1b6YzkirkHNE5DpMm99o+1xoQOd56XMFqlp6qE8vIe505uWjSYN2jLe+dr4GEk5Ftr3qWUtY8L6A614i3HK
jMbI/aVgznH3yedW2aZlItUjld4nDJuWbf4M082NYst9pUk7HbSsEy6aKE22pCDNL98c4/nQ1mJSJto77MY6sN4rW5aluNcX31rNNB50LldcfdvKgd+Xc/9FfvGdScz3LC51gPHRSEdGFdyyHOL1j+mAO2scocsrh55HFj7mqLHc1NStF/PeCS1FJiZGIk2yMxMjFS5zQxcu/ByC7akDtcPBEqJDBdTZq3MuWSvH7XiVVDOQdnObitGlC3EHRVW7QPO5D+ZedUnVQf1Zv/1fryeZcM4jwHgAMVR5pg3dzwP7ax2rPLX5QXyx/ECcGBdpfsMTZXh3is0Wg09epWBSP2q1m+rADjKkRaXXSJKvrUwJHnpJZw+v3u+orzLf4Xeqvszh+0Hds42vf7/TXzrG+/0vO1ZOfmoUsfbeeAJM7xnHEOU8DQ+FC5XO5iO0YbfmZHfxDV+ZOLfZdDeCydW8ejK88krY8SI6f7JEYmRqpOiZGJkXs7Rm7I73CNRqOpvaTq7EHhUBHEfHXKV67a1ymsMnA7dXxOOno++rmr4VoQ8ZWwggTrUQuucAT+ZfRoo1sSlA/r62zNc8GO1uUEIXdta4JWF2ty8dW9Vky5rXvrlgv6GjiXUtY8TN3v99fYWCt+tQDjAGQ7OF1rc6G81Xb8Y4sqJ9tLkwXz4oWNjhP6uaQYtg6AZH04Jvh4zReYn7YFVn8k0v2oJceZyhjHNCaiX1QsA4CUQh61I1eVNb44rtmm0cdV/weDAfr9/lQOY9006fOCgMdTHfjNZJxbeM7cIoMrcqx7rdJWy826yAs+KkfS+ikxMjEyKDFy7fHEyMTIfQUjN+S18OyYoUTcntRkppMQx2rG0oTDE83Br06hARDHSlndEqAJQBOaVk9Y3lIK+v1+tcrDMnDCcYHEfV0yUyDVRNQ0q7dTWT6X/LsSjquwhdyqnyYjZyNuF+Ny2whs9RM3d6yXa9u1IFFwinnl8dUH2W6xeKhV3oDp33LQYFbf1rnhvzFOLAp47tQuvDhTG9Rsy34SxHPikjkf4wTDY2qCVsBRu6mNo52+ojZ4Apj8ACxTDbCUn8rQ9eplnRsex4GF9ldeLhaCP+cz/s7gr7K7+HA+r21rC9eohEcbjlWeHzd/SfNRYiSmdGQeiZGJkYmRq3ZQSozcezByQ57hYuO7yQXW7tnUhMbVv/jMgc5OrEkSmK68OJBQXnyO5eXP3F4dOQKeDR3Br2NEe+att1xZTk2aSgwI2j8qC2oztb/qzUlMk5f242SkfGLutJqmc63A5IJDAY159Xq9CZiz3O72sb41yS1a3Fgxp+zDOp9qBzdX3EYr1zVySYjBvtfrTW7dDwaDNYsvtWmAiKvq6EIj/L0rYXUl0Zo99JjmCK6s1XiyT9ZAgI/xcwxMvF/dAZ4D7ziuvtm1iNE+ysv5dSwkuuLMAUTNB2ugznMd3zUvB2lFM2l+SoxMjGQ+iZFr54rbJEau5ZcYuedgZG0MYAOf4QKmE6YKFdWSEEyTSfzVRB4Kj0bTe4b5dnmQcxx2GPer1RFg2kerL7zgCHkiGfFChCeZE4dLwg5wHUA42bgdX33rQ5dsA50bFwy1fqwzn9d/w+Fw8s8lNQVgrTxpEo3g4sobAyDbiBcTusBx4B633mNct3+bdXJ2DTl0zCB+FoIXCax3VJK4v/MBNzbfTg+9YixdWNWSAcsTNgmfCt113tmODObaxvmIIxdj3F79iGNGgc3FiOvv5OkCqS79hsPhmudvOL+ErKqz2rIGuCqPk5PHcL7keLs86qhmr6T5KDEyMTIxMjEyMXLvx8jONrsKoL1er4kg5CThJoAnUJXlpKGVDT7Px4K0rY7NY0SgaDWL29Xkdp9Z31pSVp69Xm9NcqrZLZIr/61tX2iatbfDGUzdHMV8cMUqbM2JnbeGuIQUY3BSCQB3wMX24sqSCxSWKyjab9q0aWou2YYqu9sjrlsSXDy4IJyn0q8Jj5OkW0yonvE5Enu/359s9WFbRhuuwgS5tyNx4mZ/cdsKQn71IY2BGIMXbDW7OqBwbbid2sqRJnudI553lY951/KU8nU+zX85sbvYUF+Lyrsbw+nJfGtjukWIs7+TyeVCABgMBl9tmubWawRLspQYmRjJtkiMXNsvMTIxcm/ByMFggKZp7CRs+AWXnJsS2vSd9Auj6C175tNFajQ+5pwjqGZAvQrm4OP2vP9VnUbH1jG0D7Ca+NRRFRS4j3Ocmr2co7LtXYIN0ipJF8Az8GmQR2IM+7EO0SaO88PNwZc/R5tIkAxQ3IZJgWc0Gk21Z73UP9h2Lgm7hFxbZOlc1AA0gELHU904sbgEzcRzqbZSQHaJLebVAbnqFd/jnCZM1lkrjqzbLBBw9uE3GNV0Uv90OYL5a7xp5dLFlwKjyq4go/HA8mlu4fmona+N6RYJLm9xXC0vL+cF1zooMTIx0umVGJkYqXokRu75GDkuHtgLrg15aQYDgAaTU4aEso7a7/exsrIyxUcrVy5J6LhsTJfU4zgnPHbISGr8A4KR6CM4OBA5IdecgW2h8gHTr3JVudnRmIdWbjThcxBzGxcADFIuEWmi4TnXYNOxou1oNFpTCdREoltMOMFrold5a0nU2ZsTgtpB/U4BQBMqP8PAPhLt2E5sF124sP3iXCxauPKrvqf685aV2tyw7d1ccV/2j5p/sk6ukqcyBOn4muDYPhqjPDYvziI2uK/TgWVUQHc5hOXQZ2lqWx64H+ebOM5VVvUz5w+sk+Y1bqsy65yx/bmNu7ugeiXNT4mRiZFsu8TIxMjEyGm+ewtGduXdDbngUidxiqkDR5BpIHFwuepfzZG4kgOsvmHFjRt/lXd81mqMm6zRaGRfs6pAwsdnkauEOB7s5L1eb41Dq8MzuUUTO7cDwBjHgaQLtvjMwbOysjIls7O7k6vWxs2H6qJtHBCrHAFobMuoNCqYxXgMZgEoMS+8vYHH4e0nXC3lsVmfSDThI/xdebh41LeF8RgagzW715Jh2EyrbhGDaoeQnwEyeGkyD93UxurnLJ8eVzvWFnla0XOxEvaq5RXHl8+rDdmOCsIKKgo0XeOoPVxc1BYCbAv2DQfASfNRYmRipMqYGJkYmRi5SnsLRnblsQ254IrBNFEHuYnhN9twnxBajc1tXYLXANCEXgM4NbhrrzqpLgyMOqY6kfLnKiHLxAnSJRdu45xG3xjVBSLBg+dAx1InrgWyG4vfEDQr4NWHdK5CBk7oEYC130HgLS5ObpZJg4sTmfLR/iEL+xH3UTu7uVS/ZrtwsnGLovBDByj6FioGTR6Hq2hq99Cb+2nb6M8Jkbd71AApbB2kCdC9/tdVmV0OYvtpjGui1rlVu/CcOb9S/+L5UPnd4kvn1uUdBV+d85oPsB/yyxPUJ9iW6t81gE2aTYmRiZG1sRIjEyMTI/d+jNzwO1wuCbOwzhG6ErUe4+DRCdQkE+eiSuAmNIJMqzAqP8vqAkz1VvsoqKhNuBLobBE2Y6Dl42p/l/CdgzHpnKj8tYdntWrT5QMxTvCJhyDZxqxLHOd51d/BUBuoP+lWA
rVPJDp+1sCBLN9+Vptp4gnfcnbkz6q3JlGWtZYMuaqofdXvdJ44yTLo87g8H+rrrH/01wpk2IIXFG4xUfMBHVN92+Udbhtg5sbk8Ryv6O/G1jl0OjpZ3RjMU+3rfJbjIr6HL2iVmecm+nM8KZCyvyt1gUlSnRIjEyNVX6bEyGk78ufEyFVKjNz9MbKLNuyHj93VLVcCakap3TrvCiqXCDVp8BYCHZ95ue8u8bATuO+aWGsJ2R1XcrpzMo/vzh6afLvAg9tr23AmlsWBnOqmAKH6qR/o72MEf03SLikOBoOpBz41iTFPdyz0qr3ViI8pbw5ilTn6KwBqBZP7qT+q78WY4QNRrawBgANhXXixvsxbgUDnhsdRgGQg1facD2p+75Kzzl2tOsU8Qm+eB12EsV4qly6QnM/X4sGBnPq9ju/O1/JRkMtpPK5uu6iR062mVxefpDolRiZGJkYmRiZGTvxj6LAAACAASURBVLfdGzGyi2a/t3OdFA7DinZVSmuJuzZZ8xwH2kTDV7I6wS6o1DldAmJyAeXO621ptYH77ioGXQ7M4KFVJ5eknE4KhPEgtNMrbFcDdeYfMqgf9Pv9qS0bysfNr9qc9Y3j+kpjZy/mPxqN1uwjd1tNeLHCVao4F/943tmWOh9RMXSA6+aZfZIfPGb7uUWLs79LlvNUr50/abwrdQGIbpEJ/TV36P575wvOpmxr9cdaXCiYzmobPN02EjfnDKzBQ+OA50JBlv/GPLLfcRyo/y0srP4OiuZg1SfpmqHEyLXnEyMTIxMjEyNZjr0RIzfkDpdWCPRWHDC9R5SPRx8OgPjX6/WmHEOdVZO4O89tmO8sHuEcoV981uqk68vHtPrAcjkZ1AHVkZSibdhXq2Yqdw28ua8GvksEtcTA209YBtUj2ur2BFfhqgG246dbY9Sm3FcrcM62QbW5Zp4xLu9pZ9v2er014OwSqPJlHTXJBaixnLXFDRM/MO9AxAGAbjnSCiXLx/HDdlCfcOOwTPpZEz3bggHc2VBBdp5550SvdgmeuhDRxM92cqAQxM9Y6Ku1gbVvHlMddVGlflEDdCaXH5xdktZHiZHTfRMjEyMTIxMjuf++gJG7fIerS+HJIHJr1jk7T9I8ijtj1hRnw3YlcubBwFNLphoscdxVCphnfI5EwODkdGMeKgeP25VwOQmono4v66l81ZYKTI6X2tzNG88//+NzNTswiDn9mZ/aTNurXdSvtGKi/VW3kI0rLHzeJQk3r/y35hvaz8WjI5eoVRblFQAZ/9i2KiePMWtR6BZSNf90gFUDHY0BHpvPOV/l465SrraMdlpJrM2DyurI6cX2rsnBY+hiw+VMF+uz5E+qU2Lk9HiJkYmRiZGJkdFub8PILtqQLYWqkP6r9elKhG5ieayu75Gg+Za248+JxvFiZ1GH7poM5VHjF2M7OzAvJ5cmCx1L27EjaVAE6XYJ5VULbAeYOnYtObvEUaMuW2swsgxapYu2ushxwaR61wDbBd08CU2/6zgujvR7jSeP6yqVTs6afYNmJSPnt7rY0M9KfLwr/vhYzTdqvlyT0bXjnKI6avWyFh+qX8Q+j8G27dJnHmI7M/jNcwHlckrIpDkjaT5KjPSy6PnEyMTIxMjEyD0ZI7vG3+UthZoQOXHzRKvzuUlQUkfpmviaczkg4c9adXB9ox1PjBq3Fki1IFVQcY4667Y/O3lt0udNwKqbysA8nEO5JF7K6luIeBtEtNVXnCq/WQsH1SHkVZBUO4WurtKngKG2AKZ/54b7OVtpIor+vNjRqq3bFqGyBF+udKvMmrjd367E6vxN5XCxr21mUU0/Phef2TYxh9qGZec5ctVWHUMXDiyLq9a7mOL+XfLplhHmXVvE6JzWtpKEfZh0+wSPxcS8wsbrmc+kaUqMTIzkPomRiZHaZhYlRu4dGLlhv8MVpIYJ4dTZ1ejuWNf+1hiLz/HVrwKBk4cNxXw1+HmsCGC+VVwDPealfFgWPq5A7ECK7aMyuiBwlRtg9Xde3PwosOh8sF2dvTUAagHjvncBvyMHFNpebaCBym+rYXlcHw1St0ByyYaTks6v2lATvX7W/cucuHjOWDeNP63kaF+XrFQX10d9XvdNO52YrwIU83b+VNODY9sBdfiKLmq4vyOdJwdAuiddiceobUNhctu9NF7Ud2fJMI9+zjeTdo0SIxMjEyMTIxMj9z2M3LBnuGpO4hIyfw8ha/s4a8lSHVMrIAwYnGQ0GXQ5N1/tu7+c0N1kufYuaJ1dajqHbtxfQVTPO4BXPdlm7hyPVdPT/avNv/MbDZgYL/b5OhtxEEd7TSLMxwWcJne1q9NfbawyOb21v8oSMcBz6SonXElmv3Z2rgFzkNv+UJu/2uKKAWTWljPn72wz1p37xHf3Kt2uGFJbaFsH7DVZ1X9qMRhtmQ9XbLWvyqW+yzxUd9WXAdbluZpvqI0URGblkKQ6JUYmRjo5EyMTIx0lRu69GLnLF1zs9KGEUpeDaX9N4LVAVcPHZ3YOThRaUajxUoOpfI63S0RBbpJdwmfw5OqTk4fHd7dEFVRUdpatlvR17K5bxZGonRwu2FUe1b+mh84TyxT2YB6sj9OZ9XF9NJi0Xw3oeUzWy43BSahmY5cQ+QFo5c1VWZY/XgWs1TE3htveUrObJiDexsCxx+BYS0y1GKzZTXOE2pGJ3zrlxorvzn46voKpfnbtVe6aXfkHXpmHyllbAAD+YfUasNbyVq/Xm3pFrlsEJM2mxMjEyMTIxMjEyMTIDdlSGJPHvxquyUMTFjueJq2a0Jy81KAxJn9n3gpKLomynBwgOn58DufQsVRmBaDBYDCVsLlNHFcZ2Unir0s+MQfqjM4WbOua/KqLk8VRyK1bF1ziUsfWY9ovZHWv1+XzvKWF7cny8XdOgrowUXBg2d0taZ07nm+2P7etycnnXAVHZdSqjc6305H1c9uInI/qOZeYlIf6h1twxNw6UIj2uiBhmWvE5zXHuHnqqrS6LQ1s71joOUBwPsNycwyzfFoh5HO1BUFXtZ3zaRBvbeF8HvabBShJnhIjEyOVEiMTI7VfYuSej5Fd9t3lCy4X8LMcyPHoOuYCX53QJX/mozyYtNLEfTUZ1sbuCjy1lQZPjO9+SLBmH+anwdwFDNyeQdm1q/XrIg4U1i1ItxtokNRARJMAJ34nsy484jwnZbfwUNDS5Fsbp0sGHqPX601VkWr6qd8y9fv96g9Xqm/pudqxWsw6vRRonJ34t17cNiXmq7Kp7sGLfaXfX5u6mD/7tfqAzpHaTLfmqEw6RzxWV/7TxFyrqvMiyFVb1XbM3wFgV97VsRUg1TdnxX/SWkqMTIxkSoxMjEyM3DcxckNeC6/BH381ObBQ7ha7JpowqBo+vuukqcG0nQsmloeTcYwzaxJYT5dYdFLDMTQ4SilTvxTOOrBNtSKgCZVt5/46p2R5+W/IpLaq8WLwncUXmN5mwXPMtnN24Xb8MGfo2TTNGmDWsbVSq4GtQK/+FroOBoMpvmp/F3z9fn9qTLcYUaCpJTu2JduglsR0HmoJgpOY8nSAH3w1
iev4zhejHSd1nmMFpfAHXQw5n2adQyeWPbYDuMUTy8T683YKAJNq/Gg0mpxTWzjZIp5rWygUpDUXan5V0GDZuU8NxFweYd5dQJLUTYmRiZGs3zx8gcTIxMjEyL0JI8uugmiv12t4QlQgnhQXJEx6i1QDTJMoOyEHb+1WZrTl8zqBMRaT3ooNJ2A5lVdtbHVcreToJNb4chJmEGTH07lwFas4p/Zlp6tVBPStRaor81bgjPlj2+lccOJnngoqPJ+coNgGzg9VXpc4NOGpn7t2tYSqyTj4RRvXNigS1Wg0mqpmueSrNuLEy+TAYZYPs61qc8v6sd20bcitsvEx9hWuCKoOLl7Yb1hmtyhxCyK3GGAd2D9ZNpezamDCNnK+7/Tj43zejc/5JeSM9iqX09f5w2Aw+GrTNLdeo1CSpcTIxMjEyMTIxMh9AyPHF5fWQTbkGS51Sk3Ork2c1+B2SUWJnYO/axvHU40zCwTVKR0PdV4GOQ4GBjIHIkG1ff4hJ1fUnN4acCEXH3cJvQbebg5qcx3fo8LCQRZy8K1+B2JunJot1H6cKKKyxoDLn5mPju1sy21qdufvYQd+uxgnIq0Yd40RbbidVj8VFJx+akPmpbbl2NBFyKy5i3MK6NyXk3GMr6/yjfYKBKoDV8UBTNlcZWMd2TY1fXSBUMsDHPfK08Vj2EcXxmy/Lh1YDwVDtrECO/PSvjWAUwBNWh8lRiZGJkYmRiZG7v0Y2UUbtqUwEkR8BzCV9EIprlS4q1blq/3js3sFKgenJggdwwUV31aM7zx+yM0UCUtvw84DdA5I3YS5ylTYlm/PMn8edzgcTtkrZGXduLqi8nA1i2XSOWUwVX8IH+laaNQqruEr6jPBk9uw3jqfqpcmA7ZH8FEfHQwGU7ZkW0cC04SviwXV3y1kwn5sU42r0DH2amtMaBxofIas4cNOJ7ZJ/NMtAerj3J8B3MWeA0gFgpWVlTW+Ez6t8xh9nS/VFg1hU07EbE+NDbYD+yDPSZwLH1TQ0TsIKh/7qOY1Pc56MKCqri7mamN2xUXS+ikxMjGS5UyMTIxMjNw7MbKLNmRLYbyikRO9A2g3FhtBnVOBgHk4gBrLM2lXc6h55FAHjYDV5OCCtEtW1bU22Swr93UVuHDoSCwrKysT/R2w1pKAS+psB60UxbGuLSDKWxMJb7lgO3EC0gqjftcx2MbzzoWzAScTlpn1dwuAqNjp2BEn4U8MsOxHkYgGg8EUIAafCPhox/3DZtGvy9c5cbgFk0u+/F0Bh/vEZ9VNdVG+ulhwMnP86DgKYrWKk8adflcfjD4s18LCwmRver/ft3lA+WjucAuWWt7RuXKJX32bbRrH+O5BzAlvo2Df4n7BK7cUro8SIxMjEyMTIxMj9w2MXF5eRlPZUrhhF1zqoCwEtbUJLL6H4dm46iA1x6k5M4/Nx5zzOCfVtvydJ4snRNtxwld7KHXNhwKLS7DqQF1g5WTlzzxXuuVBkzG3K6WseVDWta3J6wDbzQvz5cDWLRFsO5Wd/3JFhfsxv5jDOKYAwIkggJJ/iJBv56tt3JwFxd505/M8V+oHNeKEqePzcQYBBwgxXti/y4/cYkb1ZRu4qh/LpmOonHye53g0Gq1J/uyHXTHj+PG4zrcc6MZ46gdKmus0pzFffr6nKy/q9ozwLae3y9l5wbU+SoxMjEyMTIxMjNw3MPIaf4YLWHvFHwIGqWOxs9aSIvd1V+LskG4c/qyVIja2c2x+6JJlY5k1ofK4nGzYJvr6zqiy8C1iPh/f3a1ulp0/c1J01RB1MpeU2BbszLU3G3H/0JWTTeigQacJSm/FM0+2SfSJc2w/XaxoP7Y9B7T6R9dtZ9ZBqzPsJyyrsw8fq1V5wu5xPF55OxgMsLi4OBkrdNC5DZnU53lrgas8udhg3eN8yM0xH7K438sIcvbWWGJyeYGJwV3lV146NzUZdU6Cr+aiWDyEbTSGnaya+FlnloftrbHOuuqbymqLLwXR+M4LJTfXSbtGiZGJkSxfYmRiZGLkvoWRG7qlUI3ohHAJjc9pII3HmDKmGqyWdBxIcJXNJdhagDMp4OkYfKzGX6+81WF7vdWtDwy4ys/J7ICFv6t9FdBrC4KYU7cn3o0f5PTTPqGz9nHtGAziXBdosG7xl/vyQ8uaiDXomU/YRMn5MNtPK4TRjoHV+a5bGHA8MEDxYqKWEONYVLLifGzT0HFVXkc6Lywj+yzbqpZM1Z48voKEJkaeY40f1ksXdrXkr8d1fJ1TtVnNnm6Ona5BvACo5RuXm5iiv4sv53fBM/qsrKzkHa51UGJkYqTqnBiZGMn2SozcezBynI+uuTtcIQAbRIOQP7uErcJHcACYuvXOk6YA4+Tidjyma+NAyiUpDs64Tcw6uAnV2818jNuzbfTNPV168diaCDkAOOF0Oa0bp1bNUlI78jF24tDfOb8LXh3DARrvDS+lTPaBK/F8urf+6G+rxGf1NZccVW+eb5cIg9SHnI5uPLWpLhxUfpaTbcFzxPy7kr/yDru5Khrz08Tv7MsA73xF24d/xzjcXtu4CrIubIKH2rrLN3mx6nSPan2Mr7kxKrNMbsHF4MkLia654gp62JRjRXVnYh5J66fEyMRIpsTIxMjESK/7no6RXbQh6KkO3hUwPEGclOOcJlZ9uJSdgPvEuPy9No5z7IlBer2p6lQNXMIpOICUVC5NyAEWmiyir3strsrsKis8BuuutuNgcM6i2wiYRqPpNyIxP5VDZQw5IthHo9Gat0RpBYjlDN4azMFHx3bgyd91UaOk/hltNek6cFf9NQEwwHACY90YZKJ/tNEfGIzxXMWXZXG2Y99yfSM2eG4CeF1FSivUNfszhV03bdo01Z//su9p8hyNRpPnFBYWFqa2j8TfeHMTLzTcYqVmK80LHAexRz/04D6j0WhNNV4fiOdFrc5pjOdiwlUd2beYR4xXe4ZE7R1/FeSS5qfEyMRI5qdyJEYmRiZG7v0YucsXXGz0+O4SMBuck5P+MCAblMHAJXIO6DinwcWTwxR8XPVMZdFXcKr+2l8BgPVzSZ2rFDqJnGTYwZqmmQS3JuuYdPf7Fvy3FkhKTp+QG1ibsDkBcgLiKpbOm0tUCozORjpXHFQ1fZUf68OVO5dMuDoWxJWY4OPsBUw/HKx+pQsFnWsd1+mg5/SzyhZysA31M+upNuFnLpjivFbmawlZ5y4AoUZd+nASDz4KOmw7l0xZdqV+v2+B3+mviV/9W9vEcQUZB4q1xSHHifbROdbxlZeCYl50rZ8SIxMjEyMTIxMjp2lvxciuOfmp7A9hx+Gk6AyqgRnntKLmkq47zp81AbjbpMpTdQiZo39tHHWc+OtuYfNixk202oSrF24BFLdBXUVF+TFwO0eJc1xxY7ldIAXAcXVSQZYrd84/anKr7EqarLSCpLpGm7A/j8tj1vgzEMbcqC7Mi+VwOjr9uW8cc77J86K8nS2dDVmGLgDT3/xgvsrLxQH/dYs/F188B+4CQMd21b0asKhd1H9jXB6LE676svoBLzZ1PKdXV0y48yGTgkvN9zkHsy6
6uGaqgWvSrlNiZGJkYmRiZGLk3o2Ru4ygbEQWQB1Ib90F8ZtlmCf3Z+dS0Ij28dc5TIxfc4wYJ/hzVUq/s0ysF/NUh+KJYpn5uFapHD8nt/bhNsGXZdSxtT/ziWoWv8qUE4zax4GpJlR1bpWXnZxlrQWJJjytNLhkpHrGmGEvlh+YrjZzRZn7awWVk6j6SE0m1i9soX4b/GJuWH71fzf3wZ+J2+hWCD0f/dkH2C7OPx0oOGBj3krcX4HGxZ8jto9WFfm4VpbZLjxu+IsusBwQuOTvdFRg4AWYzm/MMX92YKZVV/VHt+Cq/UtaPyVGJkYmRiZGJkYmRs58aUYp5XYAzm2a5kc1AziHn5c0AcVnBSedHA1KB0hGlzXfnRO7MdhpuqoCTrZoo5Pm9OdEwu3UqRhYmRcfc0Gm42o7Pu4WAMxDx2Kn0yTPCZtlYj3YTnGOk3a0Y978al2WV22gNg5+7gcotZ0DCrWf80e1DctdSwwKEGwrJjf/KqObW/Uh1y/8tPbjnG5M1ZVto7GgizPWMfSaBbou/pS/ysnJVfVh/3S6sWw8PvdRXjpGTQf18zjH/uB4ufhz86NzqHp18WZeo9FoamGVNBsfgcRI1z4xMjEyMTIxUseo6bAnYaTjEzTzgqtpmi93na8FoUumKmhsgej6YTOXgHQS1JmcITlIWR6XSFRu5tk0zeRBQpaLqy0qg3NClpmTjCYk7aN/HTio/OyQXJ1SoHTOEzwdaDl78ZywDDyuHnf21kBingq8zt5ObpWX7aCVLh1Dk7Kzd5evuTmsJS3m64BH54n9r+Z3zDNijvVRPmxzt0BSfXTOdVyWryv58jhcHebq/bzx4EDN+W0t5lmPmsyRx3hrRchcA8Oab7l2KovK0BWLLP+shY72cf7lKsH7Os3CRyAxkmVMjEyMTIycPu/kS4zcczGy6znnDXktfJeTqZAqsBrE8XYJk53KOTpPhBtnnoVDzRk4YbCMTvda0NWcM34crmabOOacVZ0P8G+hUt1VlwgQriqEzG5bR80GClyalKI9Jzbn3GxLPa9BoUCic8DzEH+1j86fAl700zE0SDlZ1HRytuz6HjbVW/o8vsrmFssukTu/coDn+Dh9HJDpefYZ1VvPKZ9aLDswdO1YDp1H5u18zslak1ntoxVprSaq3HrnwIF7bQ74rpQudrSf87kuOyTNT4mRiZGJkYmRs9q584mRexZGdtGGvhZeE1YXQKjAWjnRtpqoag6vCYGTaZAGlkvCLJOe5wd3udqhcjr5XdJl/v1+v8pLic+rHF220eMhhwKcJigGFrVR8NM3YLFvOH1VlhjHAQHLWktAPGaXv7Ac7la1JuiQrZaQ4h/PgyZq5u100jZur7TrU9ONbeqSMsvHScoleSd7LU5dRZV1Ut76Wfdcu8qyyjWL3CLO+Skf033cNb/lWFEbuXG6bMZzyjkV8A+b61iOd+gSPOaxGcds0q5TYmRiZGJkYmS0SYzcNzFyQ+5wsQBA/TYtU1xV8i24mBB2IJ4MvQ2tiS/IJUxOGnyeHYGPseyzthi4LRtM7BDuNwX0djwHDycTNwa3c+PrK3F5jpRHzeldgCh1JSvVT3lxoNf04+/xt7Z4cXZTGZ0P6XcFTT2m9qgBdc2HWD7dTqLJ1MldG4PPc+zUANnN+axxuxYumrhcO543TfLaTvlzLqjJUtuGU5O5K48wxdYIBx6qi/5YqI6p/hXE8VKb79BRQaXmX+6c5kPtozmoFvtJ81NiZGIkf0+MxJr+iZGJkXsyRnbRhpQwXZDVqgH8j5WK/afBQ9sw7zC+LgTUWID/DROWmSsFzkGUGERUJrVFHFf5NCHqlgTV38nFgVhKmar6OZk5WTvn4d8wCJu4X/N24Mv2dWCgx1UPrSjUqhQx7zWg4/3MjpRPLGZqYBW2YNn0mB4P+VxFUD87ANS55x/WZPBlPi6h8Hguqfd6PSwuLq7pUwML9V8HojyWW6wov7AX/3Pjcfvgp/Gh/9TfHFixbDW/Zv0Y+OMf5y0l9WH+x/x0IcA6qc0cf/5ey1uzQKdGtfhIWh8lRiZGJkYmRvJYiZH7HkZu2B2upvFvbFFHmlWNcnxrt891Enlyglyb6M/naxOlRo/2/EN26gRxjHV0jqfOxADMtmK+NaBzFUC1k5NNdQ9b86/Rx3e1jfuuIN0VaCFbtOW+TGofDQrWWZOeBo8Ce/TreiYg7Ks+oYDOc6B6K5CzP7AvBh+ugPX7/alkWvt9DwVWtrVW1GIcBmBecDBPngf2G10Y6ZwoIOs8qTy8mHLgGj5YG8v5s7uTwPblvlwNY7BgUrnZrrxQ4Vd5O7mYHDi6NvzXASafB1YX0mo39a8aCKtdk3aNEiPX8k+MTIzU/omRiZFKewtG7vIFFzuMJlCgnqSdQ8RxnihNVO6zTrAmBE0gtQnhxM7JjtvqXwU7blsbVymSmcrBOrGzcRJmfbUvf9ekofqzTpysXDAyqZ0B/7srnCSUlyaS+Nd1u53twAlXk7XTk/9G0Lvk6xYazrYsK3+u8dAkq+DL+gWP+OsCvstGKpPal/+6WFD/dWDMurAdGaCcr+lYah/1SfYrB/iqv+PDNlBf0W1brtqtyVp9J2xQqzTWQIGBaDAYWJ1cDuEYCjvHFimOJ13E6mdX3eQ+XD1OWh8lRiZGJkYmRiZGrtK+ipEbcocrhFfBNckCawFBr6LDuAxOwGoFx/Fjg2i/kIP5ljL91h/+y3x1LJZVq3FafXPy8n5V/ht8OfkosKmucVwriOxUmiC0cqqAGBRbL+ZJYhoktfOsC4O1zpOrWEZbfitU2Dq2ecRx7s/2ZjkcqLE86s9qU50/bsf/VG9NmKFH6KCJJ+Yg/pbSbotZWVmZ0iU+1xKGA/AaqHMflVkTMs+jAk2Au1b02dZMqr+LPZfUdV6ZuCKrfbR/jFGbq+BX+90V9kmtjKne6ke1uVIQDBmCt7YJOdgebp7Zv5mH6qQLhaSrT4mRiZGJkYmRiZH7Nkbu8gVXzQFdRSvadB1jXsqTJzKCJ9r1er2J8+jVq/JyE8HB7RIwy9XlwBqY6iwsB9Ambv59ghiTv7NjuPFdYDmZOZEDmNoSoUDIfDRp8NhsY5bNLQTiuC48XICxPYP/YDCYJHtgOqC4UqQVSraDk43tzsDE/PnBapcc1c+YGBC4Ssu31JWfblcJGUJOrZIp6XxosnNgqMCliV8TnUvuPA8LCwtTv7/BvDnmNIFz/EZllm/56xw40GMZdfGiSbZp2t8N4goejxH21W0qPAdc6dPxXRyEvXWe1X9ZRu7LeUP1Zv6O3B0J1rcrH9d4JtUpMTIxMjEyMTIxct/ASHcuaMN+h0uF0OqYkiYgrTyo0JFE9BamS+aRdDRo+K+ThRNbTU91UK4YaeLiK2S+lRm3Q10S4CDR/uFoLC+AqQTFgRP6cDVOQcEFM9tWFwddCwB3jpOggjiPEeTAif8BwGAwmNw+5+BnwNKFggMVVy11Cw7WT33P2YH3BWslkOXhyijPK7C6yNAkr9WZGIdl1IWW6qFJRv2d7chyM9C6RBmyaDy5RR
F/1+TLyY7106Sn7VTXWjuXC+ZZiKn9HNjWgKQmw+Li4hQohE9z5VEXlwrMTi7Wy/m/Ayyt6LkFQNLVo8TIxEjWPzEyMTIxct/DyA15hkuVAlaNUhM8JjkSHTtr9InjoRBXVzjBaWJlJ9Vx3XeWXas0IUcAFDuXBhDLrzKwPgwiXBFQ59SqmTpi7TYm6x0201/55v5hP54L57zOmXhOXHC6/hzgvKWB51jnJuRh4NQFiI7BvsJzwrK7cWp8WB+XUPl7gB3z0dcPswwaQzFGVMFYLm2r/WrJmkEneLL/ManPxnenA/fhv9GO/ZRlY99RXVgPJZc0XcLksUIOrQLGeZZd50Rzk7MRV3x5L73mIc2Hmqg5rwFrt0e4hM++GFVItpXOrwN0Z/PQTRdtSeujxMjEyMTIxEhnu8TIfQsjS23C5qVSSrNp06aJMDXHZCVc0uG+XKXQYFWQ0TFEtqlJqSU8d4yrbTwGy+baq/yaNGs6OLBVu8XEs7MqkMbEayLRJKSg5ZxabeTsxM7P8mjiZh35uCZLbut0cDJxsuDFiPNHPaagwwsVToZuEaHzwzLw/OieZgYKBXaufim5KqCSiz/1R01uDviZn7Njlw20L9uSx3Fzw4sGnl+XFHV81VkTM4MXV8hqcmm86HguRpTcokZtqH3DZrV82bV403iPY6xXV97rApk4try8/NWmaW69RtkkS4mRiZGJkYmRiZH7BkYOBgOMRqO1E4EN3lLIW4RUxAAAIABJREFUgumVr7YH/N5vTd5K7ko/PjvHYmd0fTQhM+ltSpXD3cZUcOS3tLAsmgTZceIcH9dEpI7BQOO+s41cotagczaK8wqoLtCYasmR+7NMzAvAVOD3er0pWfUtOLEfWm2jgcM25nHUJjUgi89avYwHdhVUFVwZnFhH1b2WfBjgYp5VT/UjBS8FYx2DgTkAwSXFaMv9FQh4jmbZlfs7n1Q+uojhcywnb6OqvfGsBtScxFUWBR+Nmdoc8vg6FucXBxA1foPBYFKF5wWLi0nOi656XPPDpPVTYmRiJP9NjEyMTIzc9zByw16aEcbj28TsYJpkeQL0tr1WP1wimEXqIK4ff9fkyO2dPOosmgg0CMIWmsh5MkNWvYXN7djmWilaWVlZ89aZCGSthjgbcMLRMaNvjOeqXGprTvj6Gk+1T8ilQBCy8xj6Sl6WjSuYWnULfpG4nY7aVkE9/nEyju8AsHPnTvT7/ak2Or81Chup3/Z67WtQXXDrnDKoajtNLFztjGMxV5zkIyZZNp63SNIxf1oVU0BV/+dKVG3xx3OjiwDWM4iTcfDRvKTH1ZYOZFlvjSsFNyb2H+XB7RVkNV5r/HSxGrKqLmzDeXxS83XS+ikxMjEyMTIxMjEyMXKXtxT2er1mcXFxTWLmybEDGyeJ4+6KU41bmwA+7l7t6ORwSbZmvPVMACddJ6s6lQINj8nt2SHYXjwGHw8eGoAA1iRWZwc9x7LogkFl4O+Ot9NbAVkBShOKVh4iiCLJOQqeUe1jcOR5rPVT3bm6xccYTGrVOdVP7ebePMbycDJRezPpokhtzPoFGNfix43J/skAwXMSfNQGLhZYLtaXx3A2YXnY7+K4W6w5G9V8m+dN9eU5VvvrMV4k1XKFLkrYT/i7zpHK6xZ9NftxG1epXFlZyS2F66DEyLWUGJkYmRiZGLk3YuQ4VmxQbcgFFz+Ey/zUeVyy0fNBcczd9uQ2PBYbvSshcR9tx3JrOweYykcdU3lo0OqY0Y77cpCqrDXeXC1TmdgWNcDmsV0Ci3ZqKweIrJMmfU3k/LpT1l1lV+IEEkEdsnBii/OsV63i5RKR+m3IxXbhLRv6MC8nYSb2MX1jlto62oTt+DMnZtZX9VC71eKH9dJEGGOq/A4cow8DkEuALNesNs4uLsZ5HnmRwfq69s7nY5ywe1CAr8rkgJX5cVJ3emqyd3lBfZu/6/wEL+3LttO8pHl6MBjkBdc6KDEyMZIpMTIxMjFy78XI8XZZe8E13691zaAYTK+gOYHUqiKsPFeItDpkhaeKAF1ZVq9ENdG5IHEOzc6g/XjSmVziY9DlcVU2Tna9Xm9KT2dHtuF4QTTpF8fYLgqMLK8DXz2m86m68Hw4PXnM0JVfE8u6OJ+oOTsf099tUUAL+YKvBr3aQv3UzRsfC53U1908sl6RANgOLCO35+Sg7Wo+7uZe41d9Xqu6Yd/wKQdA6s8xt0zqt/qd51wr1yo786zlDLeg4STK+qr/qA3ZrqyXVjT1u8oTOuhCgMdyrzTuAk3mqWPVbKjgxed5oZJ09SgxMjEyMTIxMjFy38bIDXktvDqOJls3+Q4suF8Ylp3MOQ6Pw8fdlXQQV4ZcUnUVQ666qCNpIlGKcRhgOHnHmJx8WQb3ukkHyFox6poblTdk1LZaWeBjHOw8Z66io/OitlG7uYqlVjBYFwc0bKdSypRd1Nd0DiNh8gJAgVx9NGTjxM92Y3AJfrUgZ75cbQnieWEA0wTC+/01HvWYi1cni7N7jBeLGH5AWW2hY7M9de70eQVXTVYb8sO+LKf6Zdh1nlzDFOdCNp5v5/N8nOOFq5luQaLj1QCJ5Ve5uZLcRU5/tg2PlbQ+SoxMjNQ5S4xMjEyM3DsxspPHzFHmIDWGJiD+B6xe/bureRXaObdLBFoNqd2iVEDQ8Zi/OrCbVHdMr9hdkPBfrtSFkzuwqAUc9w0do/rA8nHyqgFfVGQiicYx/hz8oi3b1FVzNIiVT5xzIK724Gom8+AKg/6eipsjZ0eVXRcBCjoKBsozfNwl8uDRJYuzG59Tn1D5NLEoKLoql1bbne/xnNV0Zxsy71q7kM0tUlgu3qLBY3A/9lWtxOn4IZPKpn6j/bXiqrbjOWf5dN6UXw0k1d5qO9WtFoPO12p+5GzWBbJJdUqMTIxMjEyMVEqM3Pswsos25C2Fqgw7tFOWnaZWveNAZR7MSwFM5XGVFP7MVTxNFjxGLelqRYV5ODlnBWHTNGt+lNJVDNW+tWBXmZhqSVDtGfbRJKv8HQ+WtwZmmvQ0iLVKxTzDPmoPrWR0+abaJPpzZYd5crV2Hr3Zt7hCW7OfiwFeHLlX+tbmUO3l9NO+al8FU7Wr9gEw2a7jwNMtFlw8B7kKu+YRPu/sG31Y5y67O/nURzXpc7J28a/y1hZ2tZzZZUOWQ/txjuN2LkbU5upHDsySZlNi5LQtdiuM3LwFozs/ZPV7KRg1IwAFBUADoKD9X9MApYTewEqvhxI2KgXNuMOoGWGhN/4x1nGnBs2Y2Srf+P8IYxlLO2qDBqX0gKbBcDx2KT0M0dBcNhjxHEzaFYzGsqMUjEbD9vtYr16vh7EXTLQbtUqhCduN+67OR9uuHWuEXm9hYode6WHUtCOWHvlBM57DgqljMexwPL1jSdH0CkajBs3CAgbDAZpm+nkxlqMAWPjmZ4FLL0iMRGLk7oaRXTi5Ib/DFQNpMqsN7pyc2/NtSJ005ekcS0HCkTpIl8zOmVRnBywKUG4snXQdLxbprk8tqHQ8/l5LIC7IuS9/V5BW23TJoQHiAoPnWROqs7kDdubl7
OR8SS+Oavp36aXn1VfcbWkX0CGPVhRrCxzlMSuJqy1q9nPJvDbfXfEW55Vc0lUZHU8XD04m5tGlh4sL9QX2N54HVxlj33U6zSIFFaerxpnmHOWheZf51MCrZq+k9VNi5O6Hkc3+B2Pw7JMnfGdvLJqP+L7IRvHcF2hpznb7vey3sDC+4EqMTIzcUzByQy64OPGFcHzOgQCw9lWl0Y75uCTOARN83a1fHocX0M7InNR5q0Cc14sMt+h3xtardLaJ2in4BH++/VqbZOYdb4RxCeXQ/nXxywf/plncr1a3VmUJvXU+J/W+sR1Xq35tO0yOTapRExvHxVpbqWqPh31i7tryl/Jctdlo0m/V9g3YNGVcQWu4nDbhjSmZV+dr9RwQcx1ztFY/ljWqlmsT9aod1vo9H1u1actj1e6lAKdd+R7sGF6xZmGlYOCO87FVe63+8F8twTDPWoJi/lFR5PitJXhNfExdY3LFkm3hdHekSZjl0HZd9mT9ZiXoLnLgqfLMqlx2ya/nVQ/NodxG5dE7KUnrp8TI3RQjkUWEPZF647lk30yMTIxkvj8rjOyiDb3gUmFrTh3n9Wq+aVa3bCmgqDMrP26rdxHinMrqts1xEme5mR8nfr0TorJoYLpJCVvEazNduxqI8as2nb2j3+GbjsXdDn/6mrGTdm/62pUfwg5cMQUos3yLq9cRU/1+f/IAqv6gZXyOc3EMWJsUu8aL2FEf5j4hv9u3zv7qdHTjOxCs3Z1kmygPpVqCrQGc5ild3MY51TVyTW2hXNPD5VnXvjYPLJf6gi5gmE/Xoj2pTomRuytGAth6MfCelwPXOxF48DPWjI1PvAs482vAQ58FHH3c2vM/DbrkB8Dfvxo4/heBX3vCz0aGLrr0AuB9r2o/3/i2wL0e1X7+3AeAb36u/fyQPwCOOb7OY3kn8NbntJ+vdQzwiD8Gzvk28NG3tcdud1/gdvcDUM+FiZGJkWqHnzZGOlsFbdiWQp3YEMwpoM7F59UQ8bnmoHplGcaIyVYAckZn+bsuXtSQrjLndNRzTDp5wbPLLmzr+Kc/YMltZ8mQtPuTbpsB1sZIfOZErfPe7/enYkNjUfcza1x0gQv/PkxXHnB5QReBAWxdz/A5OVVGF5ddCbEGHjq+8ta7QGqv9eSDePMXg3PMv/Z1QBb/uE+XzjXZQo+uvknzU2LkboiRALDtCuDDbwZ+6a7+guu6JwD9RWDLgVUZr3G64tJWxjs9cPe74Lr8YuAvnghccDZw+/sB735xeyW7eQvw1ycBt7wb8D+nAef/L/Cck4FrH7uWx3AAvOqxwJc/Djz2Je0F8HAFOOGWwJlfB65zQ+D0/wSuuAw45vjJRRIwXURIjEyM3J0xckMuuFSYGohw0ox+cazmpBow3NYZFvDbMPgWq9v6oJUz5uHkCz31uSLWkz+7lzuo/g6c+DiPWbOfOhWfO2/7V/HpS96Emx98X9z2sIdV53N3pI/98BW4eOksPPS6r8GB/SMAtPq/+/uPR68s4DHXP3kGh5Y+d+nbcPZVXwIA/MZ1XoHDNl1vbhm+v/3r+NdL3jB17C5HPBHfu+rLOH/HN6eOP+rYt2Kxt5/l8+Pl83HKRS8EAJxwwB1x5yOeiK9vPQVfv+IUAMA9r/0HOHbLLcat1748A5ie5/CtxcXFqXFi3muvQOaEpVU358M18Io23DbaaIJzxOfdA6xKHC9cYVKZ2AZqO6ZawtQYD4BzC1mdH9abZVH7KPBrBTXmRCuy3I77O90dIDNwx3HNG24LWABU0vooMXL3xMhej2Loe98C/vjXgFvdA3jIM4FP/g3w2fevnr/xbYB3vhC4+PwpPfEnHwFe8MDpY8ccDzz9je3nr/4r8IHXt58f+GTg3O8A3/z8dPuT3gcccDDQNMDzH9Ae27w/8JJ/Ai67EHjj+ELwf05rZYwLr4/8FXDaqe25J/058OE3AT/6vsj3z8ALfr39fOAhwJNeA7zmCcBxNwN+79XA1z4NvP91q+0f9uz24nNe2rENOO3jwC/8CnCLu7a8vvsV4IBDgPO/CzzqpNZmp50KbNvqL7hGI+Df/hE49EjgTg8C3vIs4L8+1V5w/fBc4CeXA6d/GbjP44Bb3g09uQOVGDlNiZE/O4yszSGwQa+FV8HZcfW7S35dPGc5IfNyfGN8vrXJ/Vx7lplBJ4jbsKwKKDw+Tybz575qKz0e/bXCx8liNBphMBgYQAV+MrgEZ2z7DC5eOrNq82uC3nneY/CWcx6Mt5zzYPzVOQ+5WjzO3f4VnLHtM1gZ7Zw6fvq2T+OMbf82N5+bHnQPbB9uxRnbPoOl0bZ1yRD229Tbgrsd8VTc7Yin4qjNP4/zd3wTZ2z7DG5xyANwwc5v44xtn8Go8YvSK1d+hPec/0RsXbkQNzzgdvi3S/8K/37ZX+PS5XNwxKbjMGwG2Da4FB++8AW4bPm8qp/qD2bGZ1e5U+Bh/+U24VvMNxI283PjNk37Y6LxC/XRl2Xh2HMxxfJz3NcWdRqrtVyh8aQ61M5p3HHs8t8AYgYS5sd8XK6q2ZQXqmwb5qF5iPvwlg2XD/XHGl1e5O95obVrlBi5G2Jkj+Q+6vrAze8IvO9PgVP+ErjFXYDffh6wvKO9WPjJVuD+TwC++1/t94c8E/jPfwGeedd2u+FppwLf+RJwz0cBH38n8MantxdWr3tKe3fs4GsBb3oGsPmA9o7VaacCd394e3HynHsBSzuAP7hre5fnoc8C/v1D7YXSgYcB931cK+P1TmxlutWvAqee3N4JutGtW34vezjwy7/a8jvt1NV/z7zLqnxf+VdgaftY1i8C3/oC8Lont98PP7rlfdxN27Fe+yTg6b8CXHZR1f8AAEccAzz3XcDZ/w2843mrx+/3eODOv9nK+J0vdvOo0c3vBLzoH4Bb36O9oLv3Y4Ajrwc0zUw/TYxMjNzdMHJDfviYBWGH4TZxbmFhoXqVyxW0MErtitRdBeu4msBre8lZRj0f8unVLlfUog+Pzwmfz9V+BFKdJmzEt0v5Kl515MpEv99fA3irL6YAvrr1A/jfbZ/H3Y98Gi7Y8S18d9vnAACPuf478KELn4+fDC4BADzz5z6O15/d7pne3Nsfj73+O/G2cx+Oa2/+OdzliCfh/Rc8Bzc76F6491HPBgB8+pI34ZtXfBQA8H+v9wZcZ7+b4ORzH4kzr/oPPPWGH8ZibzNed/a98dqz7jWlw5Gbj8ejjn0rAOA7V34Kn7z4NQCAux/5VPzSIQ/Ehy58AS7aeToA4OTzHoWFsoinHf8RvOWcBwMAhs0KXnvWvbD/wqF40g3/CRcvnY33nv9kAMBND7on7nNUuy/8M5f8Jb5xxUdw+YpUKNdJZ131RVy8dDbudsRTcMMDboMHXeflWBo9F0dsOh6fuvgvOvuuNEu4cOd3cNz+t8ZRm0/EVcPL8OPl72P/hUPxzSs/hpXRTpxy0Qtx/6NegIP7R7ULhZW1CxWtIrmEy3HBPqcLpMXFxaltDu5ZCfZ/jUutLrpkpokzZHEy86JznlxQi38dS3/gkL936ccxFhWzsBef
5/jUBR/Ly8CkdwyYD+vn4pzni8dxVTfVpYu0TegSD5EnrY8SI3djjOQF0gGHANe/SbtF7qJz2ue1jj4OOOTI1TY//8vA4ub2883uAJQCfPuLwM3u2B5b3ATc6FbAzqvaO2a3vDtwwZnAHR/QbrG78HvAwYcDBx3Wtj/xli2/078MNKOWF0p74TcattvoNm9p2wHtRdsv/Er7+Q77t3enPvHO9g7Qjm3AsTdq+b3hc8Bz7gO8+QvA792m5efo+F8E7vUY4J0nAV/6WDveE/8UuP392ztdS9vbMbto037A3R7W2uYbnwXe9Pvt8aNuADz9DcCVlwFveBrw3//ezcfRwYe3un32/a09zvgK8IRXrokXXZwnRk7zTIz86WHk8vJytd+GvhZeAcIl2zjPf+OzAgmAyV5N5cHj8PHoG79zEHtcI8G6LTEOADkoVO4gd/uaPztHikULX/G7W7khE1+x6/7UOBeBz3zVzqzzzQ6+NxawiFMuOgn3P+oF+P6Ob+Cc7V/GYLSES5bPxtaVC/GsEz6NPz/rbvjx8vfx7BM+g9ecdXecfN4j8cDrvBQnn/dIDJsV3PHwR+NjP3oF+r3N6JfN+Oylb8G9r/0cfPvKf8HJ5z0KTz7u/bh4+Ww0GOK9P3gyCgqed+IXMGwG+POz7or9Fw7Do459K04+75F47/lPwW0Pezg+cOEf4aYH3ROLvS045aIXYlPZH/e89jNx/o6v44Kd38ZDrvtnOKR/NBbLZjz62LfjVWfeCT308djr/zUKerh8+QK8/dyH4+D+0bjd4f8XH/3hy9DvbcamsgX/dulf4l7Xfha+c+Wn8L3tX5rY4w1n3w/bh1vxRyd+Hgtldkjc9KB7YnPvAHzkhy/CYm8Lbn7wvQG0F4NXrFyEZxx/Kjb1tszkw/Qr13o8bnXoQ/DRH74UNznonrjJQffAYm8z0Ey/4YgXMf1+fzL3OuccF+ET6pPsX7qFKfaxa3WHxwjih9JjsbiysjKJP/2BUbfY5MUUj6FjcVsAU0BTi9VYqGr+cC8fUFJQ4ZzDCz4lXhjGZwZ/AJP5i2PMz+VKl3/cnDp53WJZt6QweHO7kMftj0+anxIjd1+M3KPocx9YfbnElZcBj3858O8fXn05BQDc4KZArwfc8BfQ+SLGAw4GfvMZwD1+G3j/a9vnxF71OODF/9g+ezUPXfx94Pfv0m65vMcjV4+//3XAh94IPPV1wP4Hr1tNAO0F5Zc+2t5ZvOqK9nm7bVunsI0vFBIjEyOZfhYY2UUb8sPH+lkTYm3iQyG92lSQYCVd0ARvHjM+hwHCcRgkumTlcXgSA5xiEtVB2QlisvU4B3nIxlUBriKojC4I1XYMLqsyrLbZr3cQFsoitg+3YrG3ZfpZozH7wxavh8uXf4AGDd5yzm+iQYMrBz/CIf3rYNisYMfwShzYPwLLo+3YPtyKe137WfjR0nfxyYtfg0GzhGGzgmGzgt8//uNoMMSf/u8dsdLsxJu/9yA8/+dPa22MhQm/nwwuwcpoB64a/hj7LRyETb39sX24FUvNdhzUPxILZRMA4ND+dSbPXR22eL2JXQ7f1O4Lv2z5PFw5+BG2DS7DqT/8EyyPtmPHcCuGvWUsjbbhwIVrrXm26ndv8F40GKGH6f2+Sjc68K540Y2+hsWyBZ+77K3YPtyK5dFVk/NXrPwQIwxx6OIxiFe7z0tbFg7B5y59G7677bM4+6ov4RM/ehWedNw/YTgaYjAcTPlSfI4kzT8QyQkiti70ej0MBoM1Y7LPa+IDun/8OvqurKxM+Sdve+ALfbdQ50TqkljIpnK5GArSuOO2utiL3MNtuW/YQCv17k4A8+Z9/5rE2Q662FT+XQtwBgR3IaS+EH5Sy1kOEEOW2iIiaT5KjNy9MbJK7/0T4INvAK66sv3+3PsAbzlt9fzDj2tf9vChH64eu/xi4Mm3A254c+DlH26fw3r6G4C3Pw9AAZ74KuD//Abwqfe07Z9x5/a5pr85vW37gQuA3zgK+K1j222I7/p22+64mwGvOAV46UOBr38G+NXfBvY7oL1IeueL2jf8dVF/E/B3ZwOPPAF40m3bY//zZeA3rr3aZscYyx730vaO2PPu125PfNtX/XNXQde6LvBH7wSec2/gvz4J3OW32ovA0mufV/vTRwPLS8ArPwrc4CZtn8fcpL1YfP8F7QtJ+ovA+84GHnEC8OTbtO1e+VFgv/2BG90G+Jd3Af/0GmDUALf61Smfi21fiZGJkTpO2O6niZFdOWXD7nDxFbxOWhiKJ0ABhp0wyAmvTsLn1UDqBAoUzoDMgysmPL4zrgKf+64O4fTgW9l6OzXaRGUlEgQHXySWcLJwosXF+lQ/+ti3463n/NbYAMAfn/hFLJb9xl8LnnviFyafY7uh0r9f+g58beuH8ZvH/Cm+ccVHcOZVbZ+/OvcheMpxH8CLbvQ1vPiMX8BVw8uqcsxLr/zfO2DHcCtecuNvTY4tDbfhFf97m4mNjt1yC/zODf4WANAvi/j0pW+q8jugf/hc45511Rfw7Z98Eg855tVrzr3j3N/GxUtn4g9/7lM4YGE+fkyfueTNOKB/OG520L3wCwffH5+/7B0YYdC+nnZU1vgFgMlrbJeXl6eSmPodsOpznOgiZsPPouIW7XkBx3x4CwdvM6slPCUGLycfML19gduH/vFdtzmEXpogo1LGuUjfcuUWa24bBs+D5gcFRT3PvLvAheeQyQEjz6kuLFUHzsMOcFkOzie6sE1aPyVG7n4YiV4POObngFOvAHr9duF/6hXtBQoK8GD5KZX9Dlj9/M5vA/ttAbYcBKyMtxEdegTw3jOBsgBsGbf99ScD93ls+3lxv3bbYdCffQK4/o1bHqUAh167Hb+VFtj/oPZjf7Hd5nfKGH9Dvse+eK187/5O+/eDFwELfeCjl7e8r3ND4GNbMZM2bWnHe8l4G99+M97OuLAA3OLOwD9fOv6+2G6DBIDffzPw1Ne2nzfv38oDAH/1n2i3b4y/lwIcTfKx/RY3tzaM59g2bUHvy+2jC4xHiZGJkXH8Z4mRtQszYIO3FHKyC+VYEFbS9Y9zrLwCEPcN/uyUfFwrB6PRaHL7VoEn5OWr99hiUQMQ1YlBhInlZP23bNmCnTt3rgkGtQf/DVlZNrW/VhdK6b7Vuam3BaX08Lqz74MRBtjcOwClFLzipt/F8//nBLz0jPaNeQf2j8TvHfcPlsegWcYIA3zwwudhhFaH4Pey794KBQUjDPHKm5w16bNteClec9bdcZ3NN8ETj/t79NDDQ455NT544R8DAH796BfjFge3b2x68g3fj9edfR+8+sw7Y4QBXnLjb6GHBfzJTc7EC04/ES854xex38IheMGNv4yLdp6ON33v1ydy3+laj8f9jnoetg+24h8v+IOJfK8/+3547omfx5vPeRC2Dy7HK256BhbK9FuMmIbNAP91+T/gq5d/ACMM8YCjX4RfOuRBAIDl0Q40GGHT2HYxZ88//UQsYAGvuOl3AQCHLx6LZxx/Kt74vV/De77/BNzykAfhAUe/aDLGD3Z8C+/7wdM
mMoZf8nMdTdNgcXFxKpHGbfaIBY4JnntevHGM8ZaLIAYm3grF/3gh45KVttW447EUQHSRX6vU8TldmLF94jwnXU3MHFNsbye7q5BrrOs4zmY18OW4rQESV+NqeSfaco5R2zu9NLfkxdauUWLk7omR6PWmt7z1CQM2bZ5W4ndvsfqWwocdC3ziKqAZAfcfXxjFHa6/OX2aH/N85aPaO0EA8LQ7tXeeePza9ruF/tpzKh+w+vr6uFiLPqWsb2sfX1zOot6C571pv/afUsjG1CXf4qapC9VR06CH9tmqeAFKYmRipNLuhpHFJcb1UK/XaxYXF9c4gzqCjqMKxLEwoE5WGIsNoUZxWxyUd5zT3y/QW8Msp/IAVrdf8AWNViNcNTNuXUfw6g/D6bjOls5uanvWtdfr4Qabb4PHHfO3aNCgjLcXtp8BoODN5zwQ5+/4Bv74xC/i0MXrrtqFXrYRbbmf8ptFBQXDZhnPP/1EHLhwBE660X+hoCC24DVNM+HTHmenH01GiHPT7YFSelPHam35XEiu4ynV+LJsymPUjCZyOT5rz03r+KYL740rRxdO/e4E0O2j6ncTWeRCnBcqEQ9xDph+7qFpmonfBi9evHAshAzcJvjqgomTtfbh/ffuBxSZmG+tOlXrp23dcW3P53XBCWBq+5Paura4dnnGAZkuaGdV43Qc1mXWwp75xbjD4fCrTdPcek3HJEuJkbsxRh52FHb+3Tl+4hxp9TrG0zgyi79VwUeYgqFSVvkkzUdqwz2VrrwMmx++ul0zMXLPx8jBYICmaWxAb8gdLq2WsUBd+x6jAsXt2Ai8yIxJi0qGu8rUapaOz47Atzp1suO4244Qi1Ze/GoyZz7qBMyHZQj9VFbuF/bl3xbQcxpUq3KM97vSs1wFBX//g2fgm1d8DADwrBM+veZ3qXrmWSTl4T7XaNSMcNLpN0EPfWwfbsVfnHUPPOfEf5vSu8anlN6aM659jUed9zySr182oGK/dfBZ6PXRG/XRW2gr4gsaHkeJAAAgAElEQVRjvxwOGywu9jEaNUDTYEQJpDQ9lF4PiEvASaJufWQ0atCMGvTGft9fWECvVzBsRgBWk+NCWUQzaoGtaYB+b3HMEWiaAkzGHD8TsbDQDtk0aJqCXulj1AzRlOnKX9hAP2vi5WdLgjimFIh4sajgEOPwxWHw4HY1sNHxam24LQOY5gO+cNYLXL74Zbu5cWq6cF7R8bm9q9SpXtyXgT1pfkqM3E0xcrVR+zzWRtEoYyRpBvUXAbnISYzc8zHSyTBp767e1kO9Xq+J7RE6kFbMu4y7RjCTTGvnx3KseX0lt3MVd+alV8QqL9+K1vEVBHhSuZ27vaqTVEqZupPg5InP6iwB0M5xj9vvtvid6/3dGp5JSdckXb7yA7z2vLsBmE7YHFvAajWcF3i8BcQlO/b9OMdg4eINmL4jx3cKavnKLXSDNDHzoq5WVeP2mndUHx1L82fYi8cJXVinOM/ta7y5H8sW/ZaXl/MO1zooMXL3xUgcfjSW3ncuyplfx+LTbr9GJ7fQ0gWs3o1wtuF+yi+OxSKSeehruZk357m4+NV4jZdC6AI1eEcu1O1d0Z8vYPl5Pd56yrrpRXYQj8O+EudcrnT5i2XlO6hLS0uT5/aiLb8oQ/NZLNrZFmE/jgN3R1rjINq7nKmfJz6w0MfyqduAKy7Fpoded4o3z29i5J6HkSsrKxiNRtfcHS43oa7CxUqpQ9WSWm087gNMb51wSU8rAMzHBSP3Z6dkcOGJcXI7vVhOTprsIC75Kc/g66oRi4uLWFlZmUoYQYNmCcuj7dauuxWV8Wa/qVwQ91emmvE3+l7iv9Xv003HmwnXnJmihv7XTP2dnKgpsHb84sbiI+Fj43FWv4zncA6JzRhNs2qnKTYswpROMy0yBxXsv3DoRHb1ZZcP3MKK+6wZgRaKypurZ3GeF2m8QFAZHY+a/Jq/eDE3qxqv3912T13QMWjzP2D6N1AcELm8WQMXHp9Bxr3JK2k2JUbuvhipdgv9Qv6avMPhcOo5IZZFt2MGTzcev8BA7avxGfL0+/3JojsuuFhmzRXBs5SCxcXFyfNO/X5/wot58sslOEeurKwAwOQHfFdWVjAYDLC8vDxZTMdFAPuFysdvsox2w+FwIkP01/wcP0YbPMJ2mzZtmvTheeMLRpaJ50ILESx3jMFzqfPYlVuVJrrSMdaX2yVGYs33PQEj3bwHbdgFF1dAWGFWnBOfJtda4mSFaoq4ZDyrLRM7GBs39AGmfw9AwVDH5L2pqpNuz3D2CGJZauAaW04UnI3mAIBvbfsYTrnk+VMOzzKxbKEHV1YcYDCQqtxaQePAcM8NxecAsljghazxA4ScGIPXpk2bsHnzZpTS/kbGQm9h8paiABiVm+dO3/7D4LG8vIylpaUJuMRx3tLDcsWYDGwLCwuTKpz7NxwOJ0C2vLyMlZUV7Ny5c/Jd7acVGrYJtxkMBuj3+1N+pOAe8mui4qQavsD+6RJT0zRYLFvwwuO/OcVfk5nmBfZd1kkTvRtbfVHtwzyUH78gQBd/DDh63MWP5j2l8BcFfNbFATrbl3XgMXXR62TmuYqxVO8awPBb9pLWR4mRewJGri7IOJ40xvliJOTTBZ9uHdW50/78j/NVF0bGxUm7xXx1vLj46PV62LRpUzdGLux9GMny84UerxcUI+Nij/2oaZrJ+oNfcqMx6mKC51lzKzD9qAHbNzFyz8fIWm4BNuCCqwYeYWgmrdQA03uttb2SGkwdWhd/0UblVaMqH/0BOA48DQanNxs9nFm3W7BsOvE8hgNFHieSLo+lWxDasVfH4uSswBfVnkhUS0tLUwGti4MgrWgxMLFsrJvan/tHIg9aXFycSqD8PT4zeETSjvZxXudPQVUvECPwt2zZMmUHlY9jgG+F6yJKq3y1BBd/R6MRlpaWMBwOsX37diwvL0/+BvgERaKKbSQ8L/y6V5Y5+rE9osqlccH99MJPAdXNMY/JsrGNeMGjsrGN2b8V1LhtJMaQt1YJY9m02qjtVS+OR/Z/tZlWVpUHLyr0Fb2cU2uA6mwfxFVcBUQH9HwMmN7alLQ+Sozc/TESZXostnNiZGLkhmNkvx5ziZF7L0ZuyB2uWuXTJc4QtNYuzjuniLG4nyYGngBO7OxQ8yT0OOZAiSfDOaXTNQKEZeC+PFmzEi4n3rAJO5G+0a49tnZLiDoSML2fugbuIYMDdpYlkibbPuaIq4SsE9svgi+qXwEc8TmOl7Ja7QsgCYpjzFtBneUfjVb3+OtFBPPk71wdGg6HUxVGF9g8NwrQmixDZwDYf//9J2MNh0MsLS1haWkJKysrWFpamlT6xnuIJzbnZwMCVNW3dI7YV9yibVYibQRoa37kFijsd5pX2H+Vt1tY6qJH5135cuzrIkrb8xwrKOnCQXOC5h0G7/AjHZf5ORuqHjqGA0Q3t2qHmN+maaZiK2l+SozcPTFykhka2DyfGLlWp8TIjcFIpsTIvQcju2iX0VOTrEu+DAiqpBpaqzxqCG7nJtglLpVXx3Tn1MF5DNeP5VEnco
Clzs5bMWoO6GSNz8oz/q5W4kp0tE7FfOM8V3HUiYHp/bBOr5gPTTIuSHWu+/0+Nm3aNKmeRVKNY7zvnAEl+DNouECu+WGASOjG1RIXyFxt4kTdBZacMLh66mwU4wwGg4ktgvcBBxwwORdbKnbu3ImdO3dix44dky0eDgyB6UVIzHU8l6BJXRd1bmHC/oOJzf3zHQ6Ywl/Yj1ycq++7XDFrIaRVWpWlloj/P3vvsiNJsp3rLXePe0Teqqq7du9N8uiQ4pxDjvQKAgFqwAGHegUO+BYEBBAgwIHAZ9ADUCOBU+FMCBAiBXV37a7q6spL3C+uQeRn8fufFlHV3VkHWUk3IJERHu52WZf/N1++zFxt14E8l/6jdWk/cxjEtT6x4nydFOlvOvnIjd1l65ioMsj1SY+1a7h+fmk58mlzpMtL/bzlyJYjPwtH2jrpliOfD0eeKo8SrswBKcWdL1fcYd04tBwDWB+0G4U/JtVHte70PhZXIPUruGj9px5LqjGq/I7JJ0fObsSeO5+TWXrXU32IaDq4KcAdk6XrJReNVNlqPX5c26dNonO9Xq+RW64LfclF55qPkYiSfc5xOF/f/ULKQQ4UVC4alVUyOUb0+lmdPxel4lpd5JmLjtZ1nUi13+/HZDKJzWbTIBYifXVdN+yTiYza03q9ToSiOqZf2n5u4tK0wQNwnloL6JMcJRUnIm0jR9I5n4Eo/VyfuOX8h+Ik6ITiBOjE4sTEuRRPhXDs+RjGfczmVD56jk/2fJzeXlt+fmk58olyJNcUzUloy5FNXbYc+fk4UuXQcuSXz5GnyqPuUpgDqIjIGp8Kk0dxHgXQ81xppyYCDhpacoaswtPioKtOf6wPx5R2iij8dzXSY1EIigJ0rt4D2R0AJAeKWtxJOOb9VRB14jxGVj5pIPIGOXS73UQomm8OuQD2usBX/0Mc/Gm6DH1RYtF+93q9xnH0rX3NLYrMAb/agJITv/nvHtH0SCEpHDnAVR1CbshyMBjEcDiM+XyeInqr1aqxGYnK8GPkmSs52y6PyEjH7HXmQNDtSmWXq5//x6J+fq7/7rrxcsy2fbKau8bHlptU5tpm0pKrE33rZJLobo4QaEeL+3COGE/JpC0fLy1HPlGOTP/z70jL9cPbbzmy5chjdqK20Dzw8LeWI58/Rz7aDZcDjxZVMN+1o16OGZMe0wG6cNRg9frcoz8nOCe+HDE4OPrOTD5OJ5VjY/7YecfAfLezd4uY3Bt1xHFi0/P10a2OR2Wv8tV2HYw5hz7qOZCEpkHoZ6J2mhKhkxR1Av/OMZcXn91u1JmJGiq4RzwkC60bmSiwIAOiYQr6p+Tm7areSdGApPQxvV5blmWDjCGV1WoV8/k8bfHrkxZ/2arK16O12q8GeJXHiciBi3IsHYXPepyJgutCz3V/PkU0uZLrp9qR9vEU2Kp95IhC/6sN5rBAdavkqv6rvqvlY7L1/nkf9H9bfl5pOfJpcmSuHLPxliNbjqT8Wo7MBQlajnz+HPkoN1wOYjnQ9Q6p4ZwiilPFDZo2vS3tnwosd9fqY8iNw4+rcbqh+mfPDXVQ0j4fcyQHsYimI7oxNGRZR6MNb1cjXnzXOnJyoSiIHCNejbxVVRX9fj8RBlE7SAQg1Oicg7mO0fvlDq/99MiX1wFgq1y0HNOJ1qmy02t8kqM6izjsduP648WMTl51fcipVzlznP+9Xi8Gg0GsVqsYjUaxXC5jNpvFer1uRPQ8Su3jpF0H9LzPPtRPjuyPEafrSM91u8yBYU5XuQ0MTmGOT1xzuKW6PoaFei4y1vPdZ/iskwwvuTSp3KTSo3C0lyNh92mV76eQcFselpYjnyZHuqz8e8uRzfNbjnx8jnQecNm1HPllceSp8mg3XA6A6vR+A6DXeXEDpTjQupJcWKrY3HHtzzHwVVDKGam2z/+cQ+h1Tmp6nrfjgORjz6WfuLz5firtAjnoY9acc6rj62+5R/0+XsAHslDC0FQJXwSseteFt0QtFfBVrm4jXJMD+BxQ+DiV0BhfLl0B4I6IRqpGzsaOAayPQ9vTseRSYPQ6rnXg7fV6SZ7dbje9z4TIntoQxOb1RjR3njo2Fh0V/VA7OvY4ni2FtU2uU7JzQHY5q/05JrhN5HTiPu/Y5P1z28udq4SvW2nruTmZMHaVpY9RiVbbVBtSLMi14b9xzTGybcvHS8uRT5cjIyLqeMh73seWI1uOfCyOzMmF81uO/LI58lR59DVcqvQccHqHVBmqADUcV6rfQasAdXtPPy9HOO4g7pR6Hcanu+v4mPS4//dxn1JizsCdaI7JnH52u92DQRSkDTT75X30yA2pGK4L7Zv3j+uRXa/XS3nndV1Hr9dLx5RUNFrnoMNxBzkf9zEdYlPUredwXMGN1A61k5z+KDmHo29OLhS1Uy3Imd+YYCkQaF+5xo85yHrEXPP/Na99vV6nrXNVXnqtTxjclhmlTmRypJoDWtehyzbnT1pHTgdat8s7d33Od3MY4d/dbvV3x6pj23fnIoTHMMDrPjZOT6PhGNfmsEjH0pZfV1qOfJoc2YGr5Em882LLkS1Hfg6O9LG0HPn8OfLRdinUu8NTIK4ddSXUdTOdQB2c87RoG2qMgL2epwCHM+SM0vuo/+mTlqMTzowRHHsMrd+VDDA8leMx8sj1tQH45BJGUx+qt1xkwoFQ+6Ggoo6k57ErULfbTd+J0mkeuqZQ5ACc3zRipOeqHFVHOX155IdjvnhYx6fpCNi424Be5/3wF206Qao+1DacRHKRUb4rWNGW60jBTMle5dDpdB68xDSXquEyzU0K3Z70v/uoFnz4WMoQRUn0GAG4DTg+OQmdIiva9PqdHPw61YPLxD+fkqPK2sepdvepOsnVm9PLMZm05dNKy5FPmyMpehPRcmTLkf89ONI5QP+3HPm8OPLkDVdRFH8eEf9PXde/P3VeDgi9c24s0saDY25IWr9fq8LiGgAQ8HbA83o45xTROGnk6srdtbsTqCHkDF3r1Lts748/stdI1m63i36/n2SxdziurI8SgKd6AHZsg4uzKMDm9FcURYrO8R/QIlJENI86PErkRKJywT5YHOwyhWiQSQ7QtD2OscMTstGJUVEUDxZdc463rfZEhE51cQrAqE+BCTsG0PWY2izAeszfcv6j+tJ+R0R6OaXv1pTTt6d07I6072TkMlRQzo0jB8w5kFfC+hiQe6Sf4pHVHMj6pML7quPMEcCpiWhuHCoz+pg730nWdevXOK6d6l9bDqXlyIdtfWkcGXF4Eq/4qvJoObLlyMfmSP3N8aHlyHhw7Dlw5Mkbrrqu/6+P1vDwmsaA3Hh0IA6AgACO50bobThIuECoB3LhHRIR+S0+KQhe+3TszpxrXalusDkgdyLR67Ru3REn13YOlHa7XWOL1qp66BQ5IlHAjoj0yFzfbUGf9M3a1KVpD5qH3u/3E3Gw6xKypT3VRc6wtV86cVBAyJEQn9XB9BzqA8jUJnUXJj1Px5xz0rIsY71eJxl5REzbUsJTMqzrfdRM3/viKSIqA77nfIU+MQ59oSQkRYR0u91Gp
9NJaRMaMdVyrL3dbheVnOo26hMwlaOSsPuQk0bOx7yPGpV2+8pFBrXPbnvepgO9PzXQtrwd/6xY6e34NTlZUoeTIvX6C1p11y6XlxLlsYlJW/al5cgvmyMrGSNj8uh+y5EtRz4+Rz7E6JYjnz9HPkpKoXfKwV4d2ElFwUk7nnsng16j7TqYKwlomxqNUcG78DkPg/Y7/9w1OYPMKeEY2fo41AldXsdkrwaNAR0iEA8fYatcdAEmQL9arR5EViiaPgEYKJlozjmRoara77ikRK6Lf7V/rrecTLVv7sjr9TpNJOq6TgBJXdibg3G3220Atr/p3nWY65/Kliiq6g7bol7qpv7d7uFiX9r0SRB6c5moPLQuZKF1dDqdWK1WyVb43u12GxMw7SPFU3A8quwy+hRfVvLzCcuxSZXbs8oyFyGP2E9MNCKq1+aIyfXghKTR0xzeUb+TsOpVsYuJhOOUk5j2w/FAdaa61/NP4Wyu/235+aXlyKfLkblrW45sOVLreGyOrKuHONpy5KH+L5kjT5VH3RY+51QUBc6yPLzkzo3DBwbQeO60DlqVzG8azVFhOkBxzA3nmJBpU4nKgVYjOjllHCMTNQ5AVknVr9e3kisBQQaNu/kkt9NGo4CrIKsGrUBIe5pjrjspkRah7wpBRvqnUTJ1VgUWZKT24GBGVKLb7SYZqdw4j+M6RpXDsQmHk787s+qn3+837EnfscLiaK/bb1hU5gpCbre5CZ3aFHUqYSppKsmiE3ZjWi6XsdvtEvCqDx3zce+LT7hUhg6S/rvK5tiESgH52PWOHUoEOTyhqC7UXryo37uO1E7cpnUcub7nMMrb/xjW5Pqi4z1W58fqb8unlZYjnyZHuo5o0+XXcmTLkY/JkYoD+ELLkc+fIx/lhssFrM6e66jeUeciaJzvURPq00HqnbY6ot71H2s7Z8R6XBXoxOdj4PxchN/byZGIy6Cu60b0TeuHJNXBvA8cS3KwMQLgSliASFEUjZxkNSQlrLquG9E6ctGJ0rEYuCiKlKMeEY0UFsbhawrULlQHRDWoExkoqeSitNoe7eiEgJQUz432SC51uK58MqG/6XfSEZyAvF4nDvrm19NPX3CsEzfAXHeA0kmHy4/ftX/L5TLJ0SNe9Fcjhjmf1clCbpz0X2Wt9ausdUKFzareTgGv44LjAzo75ctKHHqtjjdXcuSSu0btxrHBMTV3Lb+pvdBvxY3cRDY3eeWajxFKW/Kl5cinyZG5CazqqOXIliM/F0f6uFqOPFz/JXPkqfKrb7jUqCkfIxIE5WCnQtDOq/C1Xb1OB+x3+doPFKSGUhQPd09xAaqTaR+8HXVm71+OpBpPoTLt6m/0lWM5J9KIWFNHjK8ZadWIDOC8XC4bzs1/3thO/yAQUiK63W4MBoNEJBq16/V6aXwOOiobBfhjUUQiX/QLUqRoZO6YY0MegKrK1wErF1Fz8nCZKjkyZp/4aG49RY+pXNwOkAckoaSh/VJZaOqDRkt9IqITBfqJ/rbbbXS73Viv142JjBJ3RESn2su0iAOhoetc8cmKHnOQ03MpGpU+1oaCKN9zdu2ReR+b44USmN+caL0+6UNH+t1loLr0sTgeogedNHjkVNvVlCcdQ843c8Tblk8rLUc+XY6s6WvdbKflyJYjPydHKopyfcuRz4MjT5VffcOVU1yORBCkKt076ODv9SkRcdwNC8f1yJf3SwV3ysAVVL2/ubt3JwMfm5/r5OOE6gaTM24HPyUHf2kc8tG7+LI8bHuq9er1OkEAwPWv0+nEcDiMwWDwYBEw5+ujeMBK68bwc7LGdnACJVYe/6scFGzdXhg30St19qrav2PDScgnnDnHy0WPtS/q6Or4bktaj0e21D6UlP036qYPRP5yPsa1uviXokRIm25TastEVrdxHwWNOoH1p0yYchNBl6vqQa/RP5Wjt6HlVHsuB4qPw/WsWKS651onnVxbPpFw4sxNenyi6BM07b9jqY8xV98xnGzL6dJy5NPlSDgo12bLkS1HqmwfkyMjmnJrOfL5cOSpzYMe7cXHDqA6QH73R5t6vjuFD0h/yzkyhbtvvcE41WcFkhx4Uyeg5OCk9TmB0GfqyBk0oOp51Hq9GqKDgUcjOKYGc19ro34eowLoGsWr6zqBKp89t5o/3h+iC4D1mC4GZiwOUEp67ngu37Is0/k5MKW+HOCqPlTv9G+326Ux0747q+tHz9Pf3KE1UshCTwUEd2qfJClI+USM4wpiuQiQyt0nU+qbudx9CMEjdvzG+bmIJHXmMCEnW/VL1S//fWy5cSoIEuHUcx1XXMauk9ykU7GAyQlYQV1aT25C6XW7j2u/j5GT9jEnB7dbZOiT1twYta/qV235eaXlyCfKkavVfYXNoGTLkS1HflaOlLHknrq0HNnU25fEkafKo22aoYNVZ/AohQ5AFcExNTB3CgcXbUfPcUGduvaYMWu/lQD9XAcel4v3wxVc13XaVjY3LpWNbvOukZmI5uN/ImR6vRaVq+pGSdUfuQK4LLQFmCAbyERTJCB1PmvUUEGNot9zMtZFtNTjNuLRGHUSBQfNc9d2iXTgOETyaNPJPSIeROTUB9wudFLhkxjPW1cZ8xt9oM+0zXcdq47XZaFFbUkjRegcItGUC0CT86ifsaX1ANGcODAu7Y/7Sa4vuSgm57sOnXTVFxyrlBD4zTFHdaZ+rOSqdpAjuhxIe1TOx+MY6fio1ziR6G86bu2L2qHLXOVE27nNCdryaaXlyKfJkXXRxEja00lry5EtRz42R/p7KluOfD4ceao8yhoubVg7oxMwFYwDsDuM1s3g/biDhf4G0BwTsAo2ZwyuZAXYXHRRz8sZqyoR5aiDeR06Dh2ngp8DZkrlksfiCgZ1/TA/WPvnzlnXh6iiAzg56brol2OAmJKHG3Kv12s4Hn2ALHI3ido3lbXfXKotuUwZsxIHelCi1vO0fh3PsSgP50FCnK9Eo+fngMtJnnb0sTfRVaKw3heftOQmMy5nQNMnEvRNZcBnP8fJoY7T+nRyU0AnUqu4oICam3h4wWZVX9oO9qK6csD2KF6u7+rvGvX083LYc2ri4WM6ds0pvPNx49tOJBHR0L3jwinfbMvx0nLk0+XIlPhRxwOM4LqWI1uO1L48BkdKwlGDn7y0HPnlceSpNh5lDZcKzIXiinbjxGi8Hj1+7M4/J0TOd6d1I8k5mYKbFxW6OrwrVtvKkacqi+Ln0R/fVceV6UYPkeTI8CCHw7l1fXgSwWcFDd3piEW/+r/T6cRgMEiLgDlONA9SgWToP9FKdKQLfCOaL3h0O1AdUT+RJLXBXGSYMfoER3cfUvvR6xXIdeLkzkVfGIPrXPvlkx6fPLg/5SZQqmuIxSPO7p8e6VOfVALzqI63nQMXyOjge03/cFL1iZrarfeHY8cmRn7MJ2a5lAmd/OauO1av25X3LzfJ8DqoX21Y69aJgf7ucvc++mRB8Tg30c6RcM4ucvbelo+XliOfMkc+jITrxKrlyJYjPwtHyjF9OtRy5JfPkafKo6UUesfUWHUAGv3IAa52+BhZqHC1
KNjw3fvl/VXH4L87To6EtL/u6PrdHZrfeeysMsotttN+q7N5FEn7TFQn4kAwVcUuQweZq6z10a/3eTAYpL5rWgQkMhgMGkTD+0UAe5UhY1UyyclVx8y5Hr1ygvH3jbic6I/qCV1QFFioU/ui7XMOYAiJUI8SmE88FNjdXtw+cwBPfzS6Sr0aPVIbczLW37U+bYdjbmfaRs4+nRRO+Zy2SfFJY6745CHnr+g9518uf71edavyU3t1XXlkzcfhY9Y+ut3r+a6rHFZq8Qhsrp85ObsuHdO07rb8vNJy5BPlyOJQBzc1OhFvObLlyM/CkZbK2nLk8+HIY7qIeKQbLu2YCjEH7K4oSs4hdCDHlKztHhMuAOZORf1atE49z4sKOkckbnS54xEHg8gZugI/RceROx8j0jSA/XUPZe3RDJWlXsvxqqpiNBrFcDiMotinPfT7/RgOh2nLW6J6RMWUpBTUHWRVdw6watjHJheuRwcCtzW+q9xz9uN6875Qj5OLj+MYoGrajOtBAdzTSzwy4xMU9wf3I/SZI5+iKGK9XjfkootdPS9bJyc+zmM6c1/xiLvKJ3ety1p/z33Xul0OuQmfyiw38clFJL2/bps5AtKxez+OTYbVX9Sn9BzFz2OTRIrbn2KMy/gUmbTl46XlyCfGkdRZNPXRcmTLkZ+VI4uPB01ajjzI9kviyFPlUdZwqWMeA3SKOkVOsEooagg5UOAaP4ei52vkS6/1x545ElEAPEWG7rwuI63bI3E5J1Dj9t+93x4NdUCudweD1nxfrZstYPUzdZDLHRGNHZdIl9AXOxK508XDRbGPcJG3HXF4vK/6c/kpEKtT50jfI5uu62OA5iABYKrsXZ6uV2Sm42C3Jc7zrXQ/lQAV5BU8dNw6DvctB21knyMs6s8t4lUSIqKncvX0nnspZX1M29bfFBiPTdS0qH1gpznZOsEq4TuGHNOHyt0ng96O98Fxg/N0YqATFq3vUz7nxqE6c1w8dp72S+0G/3W5tuXjpeXIgxyeGkcG19TRGJv6eMuRLUcin8fiyKJzeBKp8mg58nlz5KOs4coBvYOcHlcw1YE5eLoBqOD8Oiciv/HQnWq0Hj57/zj24MZFxprrn59z7JiC+DEidPLzenKy9X4mYEzXN+tSQNhut2kbW8YF6BCVw1khC6J2vvWtG/Ixg9aoEDahYKJOqETi8nF7UntQx8/ZpUY31CbcLtwOFZi0PcBGAZyJljrksbY+FQjVDj36pb8zdo7R7rGIupOnT0C8uIzruo6iLB6c4wCeIwc91+oiUf0AACAASURBVAlR//tYjpGRTwRzxKbH3Id0fE6MOf/O+WxufLkxeV9O4aPKQLHOJ70+Xs7L4ZjLPkd6H7tZaMvD0nLkE+ZIwx+vr+XIliNz9f1ajvRdCjmn5cjDubkxfQkceao8WkqhO76X3DE97uDBsWMC1HaP1efO4g57zGD9GidMJzzvS84pXUYUDIHFt06K2p9c/3OTICe6/XX3xhIPH/FST10f3icC+BGJ8+1sdZycp4Tj46RuLaciF7kIhpJADhS0P9SvgK7Hcs7v8lcZamTE7Un/syDX687t7uTO7cdzYKY2rQSq+szZs45ZgTAXlXTdAs5899xn6s9GoaN4AHDanuv5Y5OqB/VnfEs/u1xyUfwcbuVIzonZbdBTX9xfnei1KPG5jarefMzefz0392TCr9Nzj9kCJRcVb8unl5Yjnx5HpnPj4YS+5ciWI2n7sTlS283JreXIL5cjT910PUpKoX/2CZafp99zA8gRgtbLdV48CqMK1N+85BRLyS3m5JxT4J9z6FNFAcuv07adpHRMCkQK6A3Z1w+BRdvZ7XaxWq2S4Wg7vnsRDrDZbGKz2aRInC8G1igVYMSYtX8qQ40w8F3PcedUXalMc4RJ0e852eZsTWWuv+eigRHNFKKcfRLZc7LwcbgufDKTm+i4TN0//bMSk+qI9AjAUm+s/H+SaYW55SNDOTzQ/ru8VWcfA9Vc3epfOfLwunP1ap8UqE9hi9ppzuf0nJwfHJPZKULR63yCkeuf207OL73dtnx6aTny6XJkbow+mWs5suXIR+fI4uHNssu45ciH53wJHHmqPPqmGTkj887klKi/qSByhpRzoIjmI+FTgsgZh5/v52jUIne9f3bF54yPknN4LTkj47sbqvZfjaMokE1eJzhbXe8jUJ1OJ21Nq3nrjN8fSUNAnJ/TkS4kRR6aB65ACHiqzj1S6QB8jAxcPznbYDweqXKdql16tMZllGvbCc1lRNG0Cm6eOaYRHvp1LKriExS91mWpRHks6uxj8/6ksdyvGSzi4dhc/m7XLpNjwOi612u9f3rNqcmUjzVXThFh7inQMZ/W9r1/6Cont2NjPRWJ1ja9D/iVRs6xtVwf2/LLS8uRT4wjad9wquXIliM/N0f6+FqOfFjHc+PIR7nhcgLwzuYGcQy8c8ajBn5sgDmAzNXrER8/T+vUzyiK6JQa+zECOVZ/ThaqeO2jAmMO1PjTl7S5jPdtPhyzA4jKebvdxmg0is1m01goSiGXnT7udrtYLpep3l6v1yD5Yy/MPCYvZHBsESJ6UBnodTmAVTmqjHO7J+XINzdRog86Lo0iEtH06/jvgKH1cJ7urKRj22w2Dbvkt2NjoA6VncpC6/d+ADb8cX1uQXdVVVHU9/os9mNiW+EcUOdswrEjhzHuEy4jbYe+6vVOpl6OkaD3E5kQ4cxhgvury0Kv0ehorm0f/zH5aTv+u+tcz8vZ67GJQls+rbQc+TQ5cncfGGJ7+NxEsOXIliM/B0e6LluOfP4c+WgvPtZBaPmYMuiogqkb3Kk6P+V4rn9q+OoQx87TejSKpH3OAZDW50am0R+vJ9cfr1PPGwwGsVgsEsj7I1wXTa4uwBHj1GicRuUABqJ7mhaxWCxis9nEYrFobIGr7eljea7LASBOpQauESyu9bE64GhUgt/VYbX/GpXkGm3PJwX85gt9Oc/J0MeTi6QpsSth6BbFPumiDurPRTl1LGqvOm7aqKoqrZnQ96ZoH5xwqEfHU9fN6LzqSvvu9qx/HkU9BrQeFeeYyswJXCepObLS/9iDyohrdU2Cg/2piWpuwuJ25tjhx3KTptzYfTveY9js9voxDG/L6dJy5NPlyMPFD2XUcmTLkZ+LIx2zW458Phx5qjxqSmEOFFxBKFOjOQ6yFBV2xMMtcSk5ZalS1ID9d3/E6JEN7cPHlJxzCu279kNBxiM4DgbUmTM0fpvP5xER0ev1Gm0eHmVDWs1x6GPSoigScUAEZVmmLW0VaNm+FufhpY673S69VHK5XKbdmWhLc5w532VDf9QZADU9T4GYz+owDhqqd5UzOeTaDrbhW6jqtQp4qpMcoOZs5BiwO9D6Ggmu13acBF2eOdJWe2Os+s4VJxElYfdF/SvLMqUURtQN+eYAWuXh/cuBnutVZZUD8Fx7KiOXZ64OPU9t1CceORnr2HLgrBiV669fp/aZm3B68etVd35tbjxtebzScuTT4kjdTVUny9rHliNbjvwcHKml5cjnw5E5OVIe9YZLy6d
GVHxA+vmYUfjxnIHiHKokBOiREi3+mxuYt6PjcFI4ZpA6Bo+AeMlFdbQPfMdhy3L/aJqUjsN51HPIrdYx4uxE287OzqLf76dc9cMLlJu7MkVErFardJ6CMXrY7XapDh1rLg1Ax6nEpZMQjZq443rEQsHYiUgjeJznUT3Vres+NyGhT/SRKBjn894RBXgfv4+L491uN73bw2XkoK79cVtysFe96PtDtA70jp5Vr6vVqkG8+/HJpCaDAznfhrS8f1pc5u4fOUJxks2VYxO73DnaX5/A+Hj9mhxQnwJoLzqJ8nG6/XCutu1E4rimpK/n5fTVll9XWo58Ghzp18INLUe2HEl5bI7Um7ZjONBy5JfJkad48lF2KXQH1t9cUMfAk2tzERcVHN9z53ONtuGg4XV4X90AFYSozw3VFwrnDDNXv57rsvOoostSDUblgbN7tCW1I2TAb1VVpWO8pHEwGMRut4vxeBz9fj8Bh5KWR/50TPr4WPN2kQtjPEbuXAsQc163200vSnSZQDYagVUZ58gBGeUc0eXvoKHEEdEEUScZJUC1h1yb2h72Vtd1Sl84BRK5fmtdWtT2uF7bVPvY7fbrD5RUOS+XbpLWcMVDG8xF4fnsYJYrx0iTa9X3c9fmxk//mYTlJiXqa7mS29ZY23JgjojGZMN9mnPV545NHlyOp8Z5agz+5EB1BU6s1+vstW3Jl5Yjny5HlnY+fWo5suVIlcujc2R1mHrrjX3LkV8+Rx67LuKR1nDxX0Gcz7k7cT92DFR04GrYlBxoa39yxJEjvMZE0QyJ+mnDgVD7o2BxjFz0XH7Xt6+jMPKS3TAoustRRDwAa62/8U4I6RvRtrIso9frRa/Xi8FgEFVVxZ/92Z/FX/zFX8Tf//3fx3Q6jcvLyxSxU2cF2CEplRntaKQK0lLnR16qR9rwcdZ1nSJgGv3I2Y7KWsH2mANyrdqt6hswP+awEB3Xd7vdWK/X6bj3i7ZyEwtNJXHwYLxKml4HcnL7y9VHPaTAIF+1HfVBPrseVBbyLR1zcnDgVR0xdpWP6kPtxev2icSxiZvqXBeX52zIbYxjucid68EnCnqcCQg24td4G4qveszr9/4d64NPrDQq7ph6DM/bcrq0HPl0OTKiiS06rpYjW478XBy5qx/e6LQc+Tw40nWo5VHZUzuiwtCOE105Brp6voIN9fOnA9QBq3Ep2Lsg1JG8nxqVorhgVZEAZY6kvPi4u91urFardJzFvA4yKgf67SSMYW42m+j3+2kXpP219+O7rwO5dLvdlNNeVfsXM/b7/Xjz5k28fPky/uEf/iH+/M//PJbLZcpHV2DlepWzj4++QzYQDy+A1HQMrtUoSi6SGXEAx16v17A5jpNTr5EPnaTocW/bSUSJ0OtTu6B9/ezOrLIiurler2O9Xqcdr1i/4GktbudKUppKARCrXSJj1QHy5zi7ajFGnSA4gSro07f1ev3AXvEF1yPXqL/55IzrdcxaV44otK+5yLB+VwD1/up3+qO+7tilUUytX9NPdAzapj6RPiZrxUfFQi257zpO2qX/al8uzxy5tk+3fl1pOfJpcWS/32u0q3W0HNly5OfkSLW7liOfD0eeuuF6tG3hNQJGpxGMd4ri5KF1OZkoaOI4/MbjeD9XBelgGnF8Z5hj5+jvCjYRh8fh/FeD04IhQRr9fj+qqkoLepVY3Cj5nJOl3/kPh8NU5/7cJsDpS/uUjBjD9fV1/O3f/m381V/9VfzN3/xN/Ou//mv88z//c/z7v/97ROxJ6M2bN2mbWwVKwEkNFZDq9/ufBML0zQ342ATAHY/f1YaoT/PQXe+6u462jZyQjxeNYvlEBcLgHLVf9RUHxpye1+t1AgGP3qkcsDP1GdYt5Ap9JmXM++lyVvthXJy73W4boRyVId+PAbaStEYO3Se03x5R10g1RY/l8MhtjJKbyKhc0Yfbag53dBzUrbZHW4pdTpraV2/Dx+eR2tyYc33yuvEZTQdqy88rLUc+TY6c0ffiwA8tR7YcmSuPzpGmL86lnpYjnx9HPsoNlxuFK0YNMuJhuoQ6DwPkOn0smHOu3N2qboueMy4VEm25M/tnJwUfC/X7nb2SYKfTicFg0HDW5XIZg8EgptNpI4IV8fCxr45DwVDbKMsy5ZOfnZ3FbreLu7u7oPtFNMeii4chGXZQ6na78U//9E/xL//yL/GXf/mX8dd//dfR7XajLMv4t3/7t/jHf/zH+O6771JKA4CrIK657+pUjHW326XrdTtVlb8+Nck5LoZe13UjvUKJgTFqhDNiv2OVb+OaczCNeijIKRGw0BdS5k/BFjByPTJWftMIH+25XWrOOeNhHAosahs6TupXvTCZcRmoXtSO1+t1g1h0HBQHe9WhY8EpX6Nu/eyRdtrOYYViCef675oLniMu75u3rbJEtrl2c2BP0UXRWr8CvfbPcUaxiPNU7toHx16VWY743Cba8mml5cinyZGTySRuZMyR+dxyZMuRj82RuyM80nLk4bwvlSNPlUd7whXRjLbwWR3DO+kOkvuu/7W4MWoffIGunq+gqec4efCfHG6O5UjJDUaV2+120/axu93hxYdlWcZoNEqRx6urq5jNZhGxBzh2NdL+U7f+Ua8CG20MBoMYDof7tIQVqn54p05OclEUsVgsImIPwKvVKs7Pz+P777+Pv/u7v4tXr17F119/HUVRxNu3b+Pu7i76/X5DLkokZXlIi6DOsmymMXC+ThycKABqyEKjUAr02Jo6DEUJQ/VMuoHuKqTy5TwIQvtN2Ww2jfeo8AcJQihqh+ymRIEcjkWe6LtHdZAd8qA9HadORLT/PvGjPq9TfYVUDo6rv0JuZVlGcb82ooiiUSdFZazf1YeULOmrRit9gqIEz/V6nUbatE6XbY6I1L6O5bKfwiufzOUA24lVi05aVKdaJ3X5ccU+HWuuX9onHZe225afX1qOfJoc2ZWx6Q0OpeXIliM/B0d2Op1Y3tfHdS1HPg+OPFUe5YZLlaOg647njkpRReWErnefOlitT43OgYBzATF1No38qKFp3XxnrCrUnICLokgLa+n7bDZrREY4ryz3+dX8Xy6Xqa8aSfAxY0zD4TAWi0WDtKuqil6vF+v1OuWOTybjiIjodLoxGo1ivV4nwhqPx7Hb7WI+n8dgMIjVahXT6TSB62QyiW63Gzc3N3F9fZ0IQZ0d59VImeoBoGQculhVo1+MUSMQlN1ul14kqQCNcwF0Sp5Ksjmdqd0gd0Az56BOnLShkTn6wTVKEhCigh39YwxqNzoGjUxruoXbv/fNQVtJk2MRkaKPGnGkeO677qqlRAXZ97r3ayOKaNiy6ssn8roQmXrVX/lTQPcImU8wlQA1Gq7+59HY3CRTJyx6ntpEjqRyxYneU15chx7F1z468am95qKPLqNj9uJ9VTm15eeXliOb5alw5Op+TWJVVXF5eRnb7TZWq1VERMuRUqfaYMuRv54j1/XBD0glbTnyUL5kjjxVHvU9XApyKnw6Q6cduPldBwsg+AJD6sh993pVwBibLu50otLcche2kkxEMx9dx8
81m80mtUV9eq6C8HQ6TZ8Hg0HqF7nry+WyMTaIh8jYeDxOL1AkKkRUjXrLct92p1Olp17Us91uGy9xBNgXi0Xqy3A4TIt/WeSrulWZUwf94/OxSCty1PdvIEfXj5M79fnuSG742BVRQLc72mGjEb/Woyf+G/Xrn0dN1C/UXpWUNXUF0lDQ1jF6PQ4e2jbfNYqp1yk4RkSDtJSsvaBfFjNjsx6ZpM3chJLPbMigMtEJiMoTO9FxOwmrjymxY3MuU353/audK8b57y5vPaa45gCu/cR2Ne3H9aw7tqk/+OTCbTCHhz5WlZXqUNNx2vLLS8uRT4sj1/ebYRRxeL8W/tVyZNPuWo58RI4Uv6Zd2mw58vly5KPdcCEkBc5chEANSwfiv+vdYs6BGLQLxJWpxzTC4ICiUQIenXu9fPfrI5rvr9A+adQKQ0GxCq4omNQFSAUH1WgVb6Zn56S6rlME7sOHD3FxcZEW3kIyZQFARyLos7Oz2G63MZ/PY71ex3g8Tg7DfxyPyCOgojnnbrBKJDinRlzU6KlD/3KpDf4dW1O5067qFaCk3w48lLquo9/vJ3nrubStoKvA5TaIPUFc6gMKQlqnXoutYms6Tp1Y6DXIDLk4UDOB0Cijyoo20auCh4KvykDXgdDXNHHYMc7mO1jUBlRvqmPts+fmq6ydAGmb3709xSds0oFc61LS4HfadH8+hkU6IVCMyUXXlCyO4ZgTEwX/UvKtquqBHjXVQ+1RbVknTt43x722fHppOfLpcWRvNLrvfKSbqeVyGYvFouXIliM/K0dqaTny+XDkqfKoT7gimsCtCnIjQZmqoIhokJAqkwFFPMzj5FpVqLfn0Rc9x5Wjv2nxMXnfvB9coyUXiQF8VAZE4cqyTItw1TiJdGiksSj2Oy+xyJhFm1VVxXg4jlhEVNU+xYJ+EunTNgAJ3a6Wdl32jB2S9v/qHErSHv1w4lDZKeh6FJjv9Fkfo6vdqaMreGne+3K5TP+V5Fx/3lf6oW2qromO8lllf8yOiDIxdrVhJQ69jrY9kqqA4WByDADpq0+aXJ70R/VSFEX0OveR4+Iw+fJxqt41sqmTNtcj/aMutweP2Dlm+DoDZK161olkDiP0N43iA9yKebmJq+tO5cZ/1mC43eq5Sp4qT5/4emrNMd2r3eikQfXmBNWWn19ajnxaHEl9k8kkFotFdDqdliNbjvy8HCnjwRZajowH/dXypXCk+4OWR08p1MFqcYDmvy8S9DtyPdfbcBDSNlCw5njnDNP7miMZjyQ6yFEUPLQNrZf/aui0g/Oyq02v12tE3Xa7XTbtIWIPpvP5PHq9Xmw2m1itVjEcDhNRHPpxcAKu6/f7DVAghYIoLH2MePjo2CM1CnpMGHSM2mcHd40SkVOsQKoTEKIVXAtxqvw94qRgro7nNqS25mCkvzM2/h8jLpcR7eukwiOWyFmje2rzSmB81mih+4mPKedj/hge/0HWmqNOnzWSx4LbHABRn/ZR/ZJ6FPzUl3XMPglTvahfefvarupUI3A5AFUbcptR/Soxqm5ywK11q535DU0uQqm4k8M8Pa6RQOzfz/HJN3Vr5NOjeW35ZaXlyKfFkd0jOmo5suXIz8uRD1OFW458/hz52Z5wqQL1NwfrXDQmd+0pR9f23Pi4FmA8phxtx+tWA80ZpjqnO6p/ZnwoeblcNlIKAPXtdr94d7VaRafTSYuGO51OuonSnGxy+amDhb3azr4PzW1WAWTNT0cngAjgf0w3GrFT4Hf9KJAjK6Ja7hR6vQKiRyA0IujX6h9gAABSnxKz6hIw0DQPJ0fG5RFJ768SsdqS1qXgo9E5XU+h0bucbLSunN25fR+b9Km8FEQ9UtTtdqOq9jt1uW8d+tR8QScTlRyQ0VcnX/Sci1Q5AahctChZqYyQreOWkxI+4mNUkuWzyjwXUeN37adGWBUnVO4+keMzdulpKVpfRKT1H+5v2odj/u11teWXl5Yjnw5Hwov1rk5PbTylq+XIliO1PAZH1lXzCZfKqeXIQ33azy+FI0+VR90Wns/6506tAvDO6ragqjD9nnOAnCHkzlWjdSPGwDRSp/2kzhzxqNLps/fRDX2326V8aJeVRlE2m02K5pGzThv85xqIZrFYRFmWDXIpyyJit+9HFVVjvOySw/jUoNWxGaOSx6H+5hvRNTKiRMMxZKdpH27EHOd8rsF5lUjoh1+n8ta/iGj0XaORSrAaIXTbUdtQEuM3QADZeR0+WVKSVVtR2yDSpHJUe/P+qk6U1Kjr2OQnF/lUWeluSeikEe2pH/ookWWPOmrkmD5rJDkikp/ouepbqg/FDAdn/ELtxK/3ftMf9VPkrHXrRCRXjxYlNJ0IOX4qSSADxqz9U6JVfTsWkYblky4dv9qETpbaG65fXlqOfJocmZ4eFs2JmftSy5EtR6qO9ZxfwpFsYuY+0nLkoXypHHmqPMoNV27Bm3ZOO0Zx54qIBnjxW+5OnnZcaU4Qeo0qwZWGAnJ1qIJ8PHqd98vr4DGoRmKIrnmhDq1bUzGIZCyXyyiKIu3UNBgMUlQFw8MYy93hkW6nOuTMOvjnwE/TAgCSVK8QSlEcopIOtK5TJwhkqVG2zWbzINqGvXG9k51er4DJ54hIu1cx7qqqEqHmSBTd81uOCLRvOha1UVI7fReo3Gd1dCUQPaZgriCi/dbvTibeHv3X8xUcNXrkkUT3jcDn41BUfj5RU2Dmd41O0wedJOnkBp1rf31S6ADc6G88xCT1O9rVSJ/atvcz9/1YoT/gEhFtJ0V8S6OFToLqTzmcQ4+Kg6o7JSGX1cfG0ZbjpeXIp8mRafzR3P5a15W0HNlypP5/FI4U93OfbznyYfmSOPJUeZQbLhSqitDOaKcUQJwM/LsLQet3pTkx5c6hr240x+52VRFejzstAAuYuRMqSXGc89UAvA3OZSGvkorKHMdjo4zVatUgkJAXHmuUUZ9KYKiuK67RPwBXZcQf9eccG8dQG1FnUDLgT989wuP53W6X3uPCNrVOIB7503EoaeT668TBdwdnrVvBQ6NTWocSjF6vgMJvDlRaB3pSW9EIppKPA4n7lH52EHHQdZ9yAlDC2Z/YJAtfm6B1OPirXnxXIuyMCK72wycn1HkqlUGLf8/hlYK5ygsZnppU59pQ4vLx+KRL+6w45jr1sZFSpW06IWpxPPaJfVs+vbQc+TQ58pjvtxzZcuRn5Uh7UNRy5PPhSL0h9/LoKYURkRVe7vdcZIfrcgCbIyAtSgh616vGqVED2s1FrmgTwNf69RyNKtGm90PBi/Mhn/l8/sBR1Ei1n9TBm+7ZNYmInj5OVcPUPqoT6uJMQFg/k4+rgKMRLwfWHAlrn6iXPilxa+66EoD2v673efZ3d3fpN2TAtdQFCZFugkwUcJCF6hsZaLTE9a0TpmNEoI7skRJk6ekcmneuclUi0ImE2reSIzatNkT9ERGr1arx7h4AHoDUa9z2fNJ4jHTYpTAnOyUk0id80lHXh7ULGjnziCj2leurytNBXe3aZa54o3XqRFQjoLkJk/5XG/DfdYKgvx0jfdW9n
q+RXG1LI3U6llxxP+eY/m/LLystRx7af0oc6QlA6ustR7Yc+dk4Uj7rzVbLkc+bIx/thisnfP3OedrRXHRCwVsdVx3EDcQNWqMJGp0pyzIBpF7vEQ9/VE4/tD9lWaZImYIB56sjaJTO84sHg0FyZkAvR5hECBU8/JE/eeyMwXXCGPWPPtJvNW6/UVND0xQJBTuPLPnEQeWg5+vb0709xtDtdmM0GkW/309P8IqiaMhU69cXayrYluU+79nH6frVNiBgB2kd2ynZ6dhoB/tRgOA7fy4TndggE9Ym8FLP3HqA1WoVi8UiZrNZLJfLw2Lx+vB+GsbpkUcFTh0TgK32CuAOeoOGfJC9Rug0DceJkTb1vSXs8IS80XtukkDBt9SmHFB9supyRgZ6Hv7lMvFJaA5fkJv2lcgax9S2tQ4l5xyp6zjUBrED7bO2lZOJRiCdKNvy80rLkU+TI7dMmqJ+gAEtR7YcSd2PzZHR6T44r+XI58GRp8qj3HDlQP+YoPnMfwah7xOgDr5rpMiV4KDjTuEEQYRCDVONwxcl81vEHuwxBP3e6XQS8GCsRJUcVCA36phMJo01VxpBUYfVKNNgMGjcXdN3cqxxXvrEeRQFeT4rYfrNmeoFGfIOBE+Z0AiDgqoCrho12/OqHVGv6j9iD5zj8Tgmk0nc3t42rjv1biCAS7f2nc/nDXmo0zqIIWMIRZ0QXfpkSv84pjY7mUxis9nEfD6Pfr8fk8kk6rqOu7u7RCSDwSBub29jPp83FsQiD17syX/fQQvA5Q33fGZnL+pZrVbJxtRO0VdVVQ+iQ/wVRXOhL7bc7XQj6v35vIQbm8TeIiIt+MVm1G+dHDy9iHUW6qO6BkT9jv7qAnwnONWPykIJArnym8pNf9fF6qp7tTOdAOn12JhHOzXS6XblhHCMMP0z5zleqzxcjm35+aXlyCfKkaYn6m05suXIz8mRa5nn8863liOfB0f6MS2PcsOFwJyM9e6PDuu5Hs3AqakTR9aB59pmoFovbVIfBk+EQO/qtT1VoEZVUGTEIcdZ0w1YZEpfMKherxfz+TyBvRNqxB4U+K+LIAGv9Xrd2PFIH2+v1+vodrsJWHVsTWCvHxxDVgr+2j/VC3JDDmqACizoC1lHHCYAOBfgz/+c7QAyfNZtdzk2nU5Tn1SfSura7nq9Tn/dbjd6vV7KbY+IRmoFZInOAGaNjuqEQ8ndiVr7s9ls4urqKjabTbx79y5NAObzeex2u5hOp2k8l5eXcXd3F2dnZ1EURbx79y4Wi0UMBoMYjUYpkqlEyRhUb0RKt9ttLJfLB+8igSg4D1JQH6VutXGihuyGSdrOfm1EpBuu0WjUIEOdtNGvuq6j3+835Lvb7dKWz0qQ+JtGzBkv7yjR6Je+t0Sj9/RFfcF9QHFEcUx9VH2ZNCQlCJWx+j4+55E5/U2/6/bBuf9KUscmjE5E+l/xWtvWyF0Og9vy8dJy5NPkyMPgmhOwliNbjvycHLlcrtI5BAdajnz+HPmrb7gwkGMd+ZROAMzqlNTtA3Wj8rvV3ONejquzeJSA8333E4B6vV4nCqw1ugAAIABJREFU4FHnBdxRNMbKeDqdTnosro+FdVtbAIxH3kVRpCgHJMT2q4xVo444j47ZjbkqqsO28FXVMCwlcSIfEDnRCQUfvUbr06ibGjBOrzsq8Z3rVO/aPnLRSCHyA9wUONXR6SM2QooAEVMlU+QFWZK2gE7QsxKkP4p3nwAk1fYiImazWSKP3W4X8/k8lstliqgR1fvhhx8a8h2NRik/vyiKuLy8bERyqQMbUdmrH63X6wd+wEsZKZvNpmFz6lvYAePic1EUMRwOG/Wo76NntVNNEVLwBlOYYDEG1QF90+i2grNO4HQyqCCuEUoFYL5DREpoapuOSchU/UPloIvXdRKGfTg5uY40ku4+TFFc41w9RwnRP3u0XW2bSUFbfl5pOfJpc+RekAeuVB21HNly5OfgyNXu4Os6wW858svnyFPlV99waQf10aEep8N0RoXFIFVBqmi9G9W6MSo1bhdQxCEyFnHYQYTfnGyoT42GR5ekNLCtbFmWaQcgojp6d03kp6qq5LzqJET2ICqiOICJPoKGrBQk+fMoAo/EFWw7nU4U0cyRBuw0okb71KVtIhttR8ncQUJ/08/b7TYtaHadIleNYrhOFMhU1/oYHNn4o/OyLGM4HMbZ2Vmyt/l83rhGbQRQp45+v5+iZT6JUJnQnk5qkO9ut4vr6+sk38VikcbIJGSz2cRyuWyAPtdfXl5GxD5VZ7FYxGg0iqraLyzXhb7UhcxVjhp5pK+AuoIS41HSzdkPuqqqKm5vb2M0GkWn6EasIoooEiFTiE5qNJT+0BbgzW5butMSciWCpxFtB1WO0UfIBSIj6q4TSvqhdqhypC/ad17OulwuG+sYeKeH2qtjDO2q3Six6nh8so3dOXHkongqUyVBxV2K+ibj1CheWz69tBz5tDkyYo9TfjPccmTLkZ+NIydnqQ7lCkrLkc+TIx/lCVcORFSpfveoys3VpXWgPH5HeAqEKF7vvFUwCjjUBfhrNEAjdtQHSLHQMuIAnLrol0fUOKoCBI4DIUE01KsAXlVVyhfOkS/yIzcewjk7O4u6ruP3v/99nJ+fp/qT8fJmcwF17SPAQVSRdomcKViq4WuUSwFKAY22AEfXOTLyiUOn04nZbJbqp6/oAEf1tQYKOl6wFcY5Ho8TgK/X6xThm81mMZlM4u7uLm5vbxNBK3kyISC6io1Qh+pH7Z6/xWKR5MQ40D2yJ62jrvfpBPP5PMqyjK+++ir6/X4sl8uUv45fAPx8Rm7oVbdORocqLyYyEYcoHn6lgKKgFxEN36o7zYWnjF8XaEMIEZGi1T7hoV58hPZpq9frpbGiW/qm/RkOh402sCkHbCcRnSQhA+pnYXpENMhSJ8Hz+bwRkVMC0kmc2gW60kmf4qFGVLVvyKEoDjufuU+oPjw6R1Gs9uO03ZZPLy1HPl2OHI1G9wJobkzA+FuObDkSHT42R6JbbaflyC+fI3O4TXmUJ1y5O0xVkgK+Ak2OXPRuVuv14woqGo1A2E5mg8GgISTNf+73+7HbHV6GCJihEB0XAIFCeczsTkW/qNOjjIPBILbbbYxGo2TkOD+gwfh8vLoVLTm9t7e30el04vz8PPUXY9rtdof37NWHqJJHNwBYxgqIIRslc4+0Kcg46QB0qksilkQqB4NBLBaLlDO+WCwSQOMcOmngM5FAdKKRJp1g5KI8GolivB6VY7EtY4DUSE2Yz+eJJDUyBAlEHICClBmN1Cnwo4PhcBj9fj8Gg0HDfrGvwWAQ33//fbx+/Tp+97vfxbt375IN9nq9BC6LxaKRloFt6mQPPRIx0wmaApgCm0aDkAVjfeB70VxzgjxUn9g37RCVo2+QKmPYbDYpqkcdObDXSS56pt8Uxaq6rlPKCmPjenRaFEU6pyiKhCuKGyobJqYaVVUcVKJgbEosnKsyVL9FnvxXn/Px60RN/VQndBzX+rkWvbTl55WWI58uR7qe9May5ciWI9WuH5Mji7IZuGo5
8vlwpC+p0PJoa7gU+DXy5OfpnTsg5v+56wekcECMMOLhxI72SUEAHHAyVxqOQf0KBNvtNjmvEh4LU1m0iaEAAuqoSkIAB8AE+GAkPDZXUNe26S/9ADjpkz7WJz1Eo4hVVcVmu2nUhfwVmJGR61GjHOoEWocCBPJW+SDj5XLZ2JaVx/0R+7xxyLGu67T4VevQlJx+vx/T6bQxJiUy/qNbtVN1GEgU2wL0h8NhLBaLRjRKdYjcF4tFYxvZ7XYbHz58aERVcGJIFLlBGufn542dlLB3zkWfEYfJyrfffhvD4TC++uqr+O677xKBoUv+MwZNv1DdLpfLKMsy5YIvl8uGXJCH+zvgy7bNTJo6nU7Um0NkSBfjayQe+2bRr4Itdok942/YFqCJjDQyjHw09x3bxFa73W7MZrM0qcNXiabr5Fb7Sts6cdRIOH1WfImIhCe6pbFOtrU+jbzRZ/U7+srYdDKnn5msYTcewfOJrvqEkgznt+u3fllpOfLpciSTYL1GJ5ctR7Yc+Tk4suoeghmDwSBWq1XLkc+EIxWTvDzKLoU4OB3Vu2mPIimhoAgM1Z2d4whTHzVyjgIdSqcelKIRQwVLBXyuw2lVIfyuxovSeVdFRPNxMuSiURnaJyViOBw2QJzj3W43BoNBAqfdbpfkwfm0jRPRP0iKtqgv7oNndX1YrKyO6capDohReSREH8mjc9UZKR3oij5qighbhgPKu90uRdF0Ia5Gjei/ArTqU3Wsutb8XE3PoL8acSFCRDQNub58+TKm02kiy6LY78KnEZKvv/46ttttfPfdd7Fer+Pi4iI56Gw2SzalpIWM0CVAjY6QMRMPJg5v3ryJyWQSr1+/jh9//DEBDtFG1a3ayXq9TkROexqB1DUVCmIU6mKTDLXPFCHaHM5V0FMMQPZeP36tkUeIGL9XUvNJjeJHp3N4ezzjI58cHSsZ4MsamOAz42ZiofnzekOiu2Bh505u+IDbLzrJTSjV/zQKr7KkjrQ+xSbqnKckxhhV52AShHOKSNpyurQc+TQ5Um2aGwr3l5YjW458bI7cyaYZ4GvLkc+fIx/lhktzM7XQGVc4nSLvVI/ro1l9tKzRDAULvasmAuCAqHfcREP0cSjkAGjpnfVms0kRiOVyGRcXFzGbzVJ0jDEADOy2pBEIojSAYL/fj263G/P5PC4uLmKz2cTd3V1EHF4mOBwO03hwbj53Op20+JPow2QySU49HA5ju92m3FmPTKuRquFqtAAC05x6lS19RQfoSEEFZ2bcrA9A/rSpqRg6PurnPPoJuJIfzjg5zyMPamvYgkakADN0g/PP5/OU/0wd8/k81YFc0lOduo7ZbBa9Xi/+5E/+JN68eRMXFxeNx+aTySTJhP7NZrPkR4wPn0KugCdEQtSt1+vFt99+Gy9fvmwQ+G63S1EzoqZE5QaDQWOShN6xa/50gTEy1UmDR622222SVxnNHbU08oMvq0/7pNOJhHpZJ0A92AXy8ogjOnLCqOs6JpNJ8jdSNJTIsUP6iL+hO/pCvfrEAUzSqLVOgjSlSifeREOVhClOuOoTXK9PKdxvnHCQc674BFPJqy0/v7Qc+TQ58lj6T8uRLUd+To7cCIzCHS1HPg+OPFVO3nAVRfE/RcT/Xdf1TydrkU7hqHTA7w4ZrDq8P8oEUFSg/I4jq5B1kAwa8NDIoC4IhAyUhAB9gBOj1RzpoihSNIfInToaDkb0Q9+xoBEewIr2SRPAMOfzebpWZUAkAgNnfNpPxoizrlarWMchcgBQeE63tsVY1aA80qNExLk4vJIP9esjcXRMdAO5EFG6ublpRGMYG7JF9rPZ7EE0jrr1EbJOLtQusb+ISCQc0XyBIO8zidhvK7vdbqPf7yew0XSD5XIZP/30U8qb73a7iQCQAe+cwb70MTUkq2sjdEJB1I71C+Tx//jjj2liQd/RhU+4AN71ep3sDtlrBFH1haywce23Tk6m02mMRqPoVt2Iw3szG5ND2tDfiERXVdVIO4E0NZqFzWLb6NZ3OtNJzWAwiNlslvQGYQwGgzTRY2yOQ/Sl3+8nDKqqKkajUVqgrRhG/RGRrlFsUtlpRF5lpLiC7HW9CP1QYkCPKgPVl0Ye1S+oiz5yzDH5GPH8Zy4tR37ZHKnYC4/oU56WI1uO/BwcObjfpbCOww1Xy5HPgyNP8eTJTePruv4/P4VIdECqdDqvkR2NQCm4qhCUSPjuBMJdMREcjUZoGobfhVIXhIfjRkTagUcBXh/bkt5wdXWVFhETffLICxES+tPpdPbbZd+TnEZ0IiLtmtTtdmM4HKYooMoHo9XIE7nHKJlcYcgG+ZTlIVJFRJP+Ih+IQo0UB4V8VNfUozqmDl20CahpNO7Fixdp8TLAu9vtX2qoL6jUCQc2AZl77q2mFfBZ/xgzclcCres6gT4RXMZIjjQLddEDC0PH43GKzkwmkxgOh/Hdd99FVVVJP/Rdt7LVaI+2ix0wRiY+EYcJg5KnTjQARkh2t9vFYrGI2WyWJjEaOaJ+XcCM32jb2BfjwGY6nU6a9JDK0SCLupneop91oqGP9tGR+yyAyg5Sepydq5CpTj49VYJ0HAi6KIo4Ozt7UK9O7sAJbBP9MaHTJxj6tAC/YRz0TfGC6GNRFGkROPLGJuiPRvCQlZ6jgA/Wqv7oO78r8agP5yb8H4ve/WcsLUd++RwZcXh9Be22HNly5OfkSH4rovmOvZYjnzdHnn5L1ycWBkqDniKhd4RqVKooflPC4DuC0DtyvTbi4AD6+FLJRRWld7kKhBg3j4KJfHQ6+xxlUhi2222cn5/HcDhMxkS/NdrhTjqZTBKo8zuOg6OMRqPG42PSBvQuXImJggPU9eFt8WdnZ0mO9AHQ4rPKNRfNVlkvl8vG+0GoA4fhjwgLbXHcnRr5rNfrmM1mKVJG+xqlYWzYF33SFBf6BCBEROqPpmSojTIWJ0UiRsgI3Y9Go5hMJiky3O/345tvvonBYBB3d3cpyqMvjlR75pE776fpdrsxmUzSd3xBbQeS0F2ZPMK62+2SvQKCADT+BklpGgtFXyKK/NQGPJLHeBQokROTDC9E0TQSqxNKbAW78jQa/E39DPuoqipN9pTo1C+VoJgEcD6TAGwN3erErJbofsQ+kss1kC5t9nq9mEwmifywX50E00/qRg+NiWBxiLBpJBZ5KoboZE1JWtOJmIhovYoDyF4xQuvV9tvy6aXlyCfOkWXR2Hyg5ciWI5EN5TE5MqWyFge7Qy8tRz5fjnyUNVx0BGPUwergAHFVOOew2I8OI2AlpNxAdLA4GG1ikBwjMsLveiet0UKNhpRlmR55cg79ZEyQD2NSYuI9H4Dk2dlZeu8AhSgJ//nMgkPNsdfUjbIs48WLF7FcLlMESdMVGHOv14vd6t44MlEsCnKGWI4Z0mKxaMjco3fIgHo4T50SwtPdd6qqivPz8wZxuv4BeUhIHzsDtJPJJG5ubpJTK1GqHRDZIscd29BUhRwZdbvdFC1ii14W/dZ13Vg70O12YzqdRlVV6fE8ExSAX2Wu4DAej+P6+jrlQfPIH31wrUZ
NVR9EJIuiiPF4nEgpornQnYmWRpRZI6HpHhrRcpDD3ln4mkinaL4PBB0iQ504IG/sRSNWmq+P/NQXVY4qFyVB9EwqEn6EHaid6mQEXQDmdb1/N41Gf5ExUdzxeJxwjQmP6p2ceM5B/4xbcQy7YPyKG8hOsRHf43qtz3FUsdgjdykKW7SphI9RWo58ehyZbLs+bMiADlqObDnyc3HkaltjUOlFwPhAy5HPlyMf5YZLH/uq0/pvGg3Q/ygDI+C45hirAendKecjFK4lArHd7jePIL3B3yCt0QHAh75BIPRRDYfFojgIClWyIsrU6XRiPB5HcR/NWK/XiThIzcDIaJd+sDgQEKZOPvM+Dq5ZrVYpcrTdbtNiZjVCjEnJWh1TiQzA5HwFFOTkxqbEgjxIM9AII9drvv1sNouzs7O0QFMjDuSlO+nx+ezsrBGR4jeNyGBPABrH2AVps9kkUtrtdmkbWiYCb9++TfplbLe3t420g+12m3Ks0SH643eiuEQ26Rf9jDjs4sPYIcz5fJ52EFK/YVxFUSRQWywWMZlMYjKZNGwEf8K2AFvkRjRRAYvzVd/4u9p6WZbRjU7E8rAWY7fbJVu+vb2Nfr8fZ2dnD1JH1CcgKSVlXYSveMI1HOM/YI4dsIYA/SpOQBoRkXyGl0GCG9SnOIe+lDSRFekrGl3XCRf6IFKt5KlPF5jcEpVFVoC+RrPBKScZ9KVRTyU4JRLqraqqscFBW35ZaTnyaXLknCcccZiAq523HNly5OfgyKJ7/+TwXoakgLYc+bw58tGecCEcJww67B3FuYhCaYqAGisKom5VnBuOkpQCmkbDcGgcn/ZZGKlkgmOiPJSpkUjtj4JlRDN1IqK5CFCVqGSm4AwJ+kJAbVONW/ukBsiixftOpSge4+QaNXqiN0RuElBI1FL1Bwji6JpLqzIlSqTgQcFWiH4QOWOh6+3tbapbt5lFNlVVxeXlZVxfXzciKxGRHn1TsEmcXkmVNgHkXq8X5+fnaRGuRgEhmrre7+jDo2naXK/XKfJD9AxbVEJV+8EWZ7NZ+m21WsX5+Xl0Oocc8Kurq2Sj2B3AxCRK0y2I7mIfEC92DVmjb01/0tQBXTehkyh8tNPpRLk7+OJkMom7u7tEuKT6IIvtdtvYWSoiUqSMdAQInyACEzL1N2xYvyvYElG9uLiIxWKRztf0DWxWSZa69P1FRB81cqkROsau/sw4kDnn+0RRI286JiYUeq36jkbi1Z+cPLB/x0/kpbilNwGc05ZfXlqOfHocGbFfw6U3HspfLUe2HPnoHFndv2srIsbjcYxGo5Yj43lwpAdXtDzae7i8MXVmjUrhHDgI16jQ9XyUzHF9FOokojcZRJ663W7jKQ+O5pEfVag+mtT2OAax0D/q4YV6+lgVB3TyAWA1GsJ/ImkaoVDHoF4nDQBpsVjEcDhMkb1+vx/dXTdiddARslTjpD+keETsFyqT48o5kJ8apEYLiqJoRNlYsE2+NQuvIZder5eIjOtURxH7FA0cHJ3zHcD/8OFDnJ2dxdnZWcp3p170PxgMEijRP/qPbgEMZF6W+0Xgd3d3KZqjAL7dblPUD/3T/4iIq6uruL29TakL6JBxMBb0ji8ApLShdd3e3iZ5cn4unWc8HifCq6p9DnddH7bmRW8adeZ8osIUbHI6nSYbL4p9mgAAn+z1HlABQUibPhBJI0qM7WmagD6lVYxwX9KIJ76lUX6IA7uNOLxksdPppHaISnMMQtVdlMAPCJU0L8ZK/7BdxQiKgjv4QLoG42CMKddf8EjX2eQK7TqRYWfqV8hOo/v6xznU0ZZfVlqOfJoc2bl/agPu6SS65ciWIz8fRxaNulqO/M/BkY9yw4Ww9O6PTvidKwLUhYM6mBxQOSkpgHM+jq2PE7V9QIDHswpYAAuLPomAcOdMlABF6di0rzgM56s8MH5Iif7yvhHAjN+UnBT4lIwYL2PUlAvVzWAwiN62HzGLKMoyOkWnYTBKlBg6EbPRaBTT6TQuLi7i3bt3qR0lXZ0kIBPyoDmPsUBcy+UyveNhs9kkwGEsmiJChAoCLooirq6u4s2bNzGbzVKu8GaziZubm7i4uIjJZBI//vhjbDabODs7S9E+FnRWVZWioxCP585Dgujz5cuXsVqt4ubmJpbLZQLiTqeTCBiwRm/k0fd6vSRT7Fl3ktrtdmnBL33QSQypBpPJJIEb0RzsGRKDNIn2EY1EH4BnVR22O14sFo3oJuPBT3SCh81AWiyWh2DI+deCL0VEepGn+lW3u18kSy7/1dVVDIfDuLu7S4TEug7doQtdat+xvdyEFr+BMNE5fsSYiGiqH6EH5MGYHGu4Dn1QP9ihmIXvUxdpH9omNqh4gy9Rt064HZeUKHzi6n3T8/QcjnmEsC2fVlqOfLocSX084eC6liNbjvxcHDlbHTbN6PV6Kf2x5cgvnyNPlUdLKQSE9G6VY+SGq5BHo1ESKEahjx4ZMILGaXRAGrmCqDQao8SmkT3u0PV3HIE7fxYyMiba5zpAkXa73W4CmIhoADqfqUMXNwLMmleNwRMt0XcsAADIRxe8avQBkozYO9ewHkT8dP8+h24vGSwLjqkPWRRFEefn53F+fp5yxt+9e5fAEBJUYyRCRxQOGaFH5E+EkBxo6iQKpgQLEZEaMp1O08Rgs9lv7fr69esk+/Pz87i7u0vAEXHYqpZH7Bq1wbY0YovOAbvNZhPffvtt0g3ghU56vcMLB9frdZyfn6dF00Q+iezpBMEjUES1PfIDOUyn03j79m3U9SF6O5vNGqkj6PHVq1cpnRRyByjxR0jQI7PYkQIKOlcgZ2chSPfVq1epvUF/QG+S7iByZDCbzZLOqQfi7ff7aREzbc5ms5RKQwRYdYA/46OKD9hBp9NJ7eJLSjbqg6p/CL/T6TS2o9ZJGTaBr+tnjcTp5Bv7AEP06QJY6BNSnbDtdrvG7lnqx0oGSlIUbDBHMoqzXI9ftOWXlZYjnx5HriRtkHVdPLVoObLlyM/FkTHfr5Eq4pA613Lk8+DIU+VX33Bp4xoJoxNEJxgsAry9vU0DBDggFVUs10A2FJTMORR98RoFIyDvF9KiXu3v7e1teoQPqEJSPGqsqiqBMoDIIkM1qIjD9q6QjPYLYPU7cSVZJWM1Jn2Rod7ZAw44gxJERERXIk39fj+m02na1UeJl4gZ0afhcBhfffVVSkmAePVRc7fbTZGZ8Xgc/X6/8V4mJWYFFrUb+otDaTTl4uIiAcX19XX0er24urqKXq8Xl5eX8eOPPybne/v2bVTVPl/9/Pw8EQA2Qx+2232OOS/oY73bxcVFijytVqtEDvSJR/48wsYH2N1nPB7H3d1dIkSiXEqmEKBGOsuyTOka9PXDhw8pP36xWMTXX3+dtmsF/Hk54W63i/F4nKKIFMZJXjXrD3q9XoqAArIAtfstUaSqqhJB93q9tNh4OBzGdDqNbrcbL168iPiPiKI47PY0Go2SX2vKCbajZHt9fZ0mMdjH1dVVwozhcBg//vhjg0DQMRMPtemqquLi4iJms1kicOSvC7Y1OqYgiq
3SPum6agNgBuPwaBtjBb/op7ajOfEaUdZJN5FtJguMDx/TaBz2hx6VEJQw6GsuYqnErJPetnxaaTny6XLk2iZcbAyhsmo5suXIx+bITreZct9y5PPhyFPlV99woQCNutEwndC0CG4EAFEATt/pgAL4Ayz5DUBBuDgUfUBo+jhY79D1PO0vTr1ardJNiYIgY9DHmgCIKg0FaPoAdeidPlEJDIw6SR2IiAd9jThsCwoBMS6VA1GTAylH6j8yZ2eeu7u7iDjkgCNnFkDe3d3Fzc1N/O53v0vXEd2EbAAW6mZdAI/0NQqpxg0w8J33OKleIZRXr17FZDKJd+/eRVEU8dVXX8W7d+8S0azX67i9vU16g0A1Uswi1KraL0zFeSaTSWy327i5uUkR3MvLy1gul41oD9dpNFltQyNQRKFID9FoHgtsSc2hfYi3ruuYz+dJvjg2+fXobrfbxZs3b9LC28ViEefn541ouUYjR6NRip6/fPkyRWT7/X58+PAhyV1JV4GOfP/z8/NUF2Ol/jdv3qR8a3yBHbiInjtw4ytE5m5vbxOx6nbOZ2dnqX9KzjoRxZ/4Db2wyHs8HqcXXZZlmVJJ8DlPgWASx2J22sAelHiQt0fvNB2D48iC3/mMzukLNqQ+j43zp/1F5ooL9AvMzj0N0T5zHudqJLEtP6+0HPl0OTJFuONwDjdEES1Hthz5eTjy+od3yVbhpJYjnwdHniqP8oRL7xhpkP8oQgGqLMtk0BGRFhhqXiZ3/Z5qofUTvUKpGjlUslGnR1l+t46SqGc2myWA4jyAiMfykCLX5wgLgijLw45OEXujUEDWCBCOTyGPua7r9GgbUkAu3NHrVqvIYP95f0yjEIAAC0vn83l69KpGrpExQENfWjibzdJjZByLfqpDEXEAkNQ4z87OUptEH6kHZ/dIx9XVVVxfX8fXX38dl5eXDVv65ptvUr43ETJemPnu3bskB33cTIQNsijLfZpJURTJHgAuiMgBhtSF6XSaxnp2dhYfPnxI5MwYt9ttahsCICIEGaP78/PzBKa9Xi9evnyZdshCd7xolEkKBA8hMyZe9jkej+P9+/eJ1BiLRnLUZ/EBQBxwHgwGMZlM4rvvvku+MJ1OscIUWcM2WYyru2eyBTEYQAQU+xkMBvHu3btYrVZxdXXV2CZWX65KGg3RM2yN9Jz5fB7r9f4lokxSKEQ2dYLGegGO+85ZRVGk3aOYOBFxVGwBwxQXsHed5HANTyGUhIjqaqqWYl9Ec5tnnYQrtiiJ0BeNnHOcc/XmoL3h+vml5ciny5FJB3F40qUL7luObDnyc3Bk1TnssqcviW458svnyFPl0d7DpR2OaOaOI1gdGJGem5ubdIetd/cqGMBWCUoFoHeuXK+RM0CD9lFAxCFH0+/WWcyHwwC+8/k8AQmO7MCu0Qlkg8J18gmIeLQSB8KIAW0lr91ul5wROendNm1CyhBKWTW3zOTRLyCz2WzSjjXj8TiqqoqXL1/GYrFIfSESgkPzqD5iD+bkQi8Wi/T4lyiPRmhwVqIGjEffZh4RCYQwfOyHRbLUudls4vLyMjabTZqsMNGpqiqRy3Q6jbIsYzqdxvn5ebx8+TJms1mcn5/HxcVF7Ha7BskCokRu5/N5nJ2dpUWrABS2OhqN0qRjt9unnrDNK/am5LVer+Pu7i6220N+PtE9PmNntPMHf/AH8eHDh1iv13F1dZXSOSaTSfaRPTLFl66urmI6ncaHDx/iN7/5TYooQtj4JLImYs2EAeAm8kpuPpHvbsmOToedwfAH+oFdlOV+l6S7u7sExETuhsNheicJOtZNOYiK6gSR3wF9ZDmbzRIO1v8QAAAgAElEQVTwQ3T0Gz3QR/ADPbG2wUlGc+TV1hgj48sRCjLFtrkeO43Y5+Qr3oEN+LtGjxUHmLDTF35XjKZeL+gfndEu+NaWn19ajnyaHKk3NDrBajmy5cjPyZHj0TjZHHppOfL5c+SjPOEC6PWxHQPe7Q756QAuAgccUZ7eQXJnjUD07pE6OY8oiW9NiSNBCPQp4vBIdLfbpbtgBEeON+DGhLGu6/To9/3796l+xgw4YySAsq6nAtTm83lSGI5cVVVawKnGjCEhR2RAm4yXqA2yoz+dTifKe2DYbQ+LmJGr6hIAo47VahWz2SwRBgSiqRpK3kTdkOl2u00vyiNCAYERweLdHNiFvl9EdY8eLy4u4uLiIsqyjMvLy9hut2m9w3w+j2+++SY51GKxaKRzAKzdbjeBF7KChNFBWe7zpNX51a403xswQrYvXryIm5ubRDakEkREAjCNjqFTInZEcJAjMnnz5k36jcf+2M7V1VXKWUaHjJP+E8GaTqcxnU5TPv1kMokffvghimL/8krGDaF63jS+Bmjd3Nwkkul2u1Fs7xc6F2XyJyZdmmagk8B3797FbDaLV69eJbLHB8uyTBFM3jVDX6fTaSI6nTACzP1+Py4uLuL9+/fphpC0HCYSTJSYOOhElag7dpLWQ96f71FATaeiHr5rlI1Jp/oev/FfJ9CKtfiWPtUATxTT+E2xQic6PpHWSajqm3PpY1s+vbQc+XQ58qCkQ7qQvkuq5ciWIz8HR1adw6Ywt7e3aXfHliO/fI5s4IqVX82egG1E86WFDFaVgDB4NJrbhpOiu5wgvNwOTaQ2AOIUjGI2mzWib0SAEBp91TtWfZyI8DHUzWYT33//fdp6FOdQYmS8qkDN7aVvCnZlWSYjRtlnZ2cN4yN3nf7WdZ3el4GcGDslPSaHiOtDKgQOTZ0YGAAC+E+n02Ro6Pq3v/1t3N7exvv371OaBo4bcdh2tKqqBLT6yJxH2PqIeb1eJ7kSEdM+YvDD4TDG43FcX1/Hn/7pn6ZoHGRxc3MTk8kkRSM17xvZLpfLuLy8TAQMqEMk6LTb7cbt7W3Sma4p0FxyBS/6GxHpZYabzSZNRCDY+XwedV2nRdGLxSJFg5hkQLLYDXKo6zpevnwZL168iLdv38bl5WUDrIgE0jfs+P379/HixYv4/vvvYzwexx/90R8lsiVaHBFpO2LGx4RqsVg8eDFnv9+Pm5ubhg8Puof3bujidXL90RUTj6Io4vLysjHZGY/HKZ0F/4bYsC2Ijc+9Xi+1hcyxKfya4x8+fIhvv/02Xr58mfpOfRGHxbQsXtYnCxHRWHsBgOsTAPTkUTsmafSZCSdjxLd1UbNOHrV+jdpzHH2Dj7Th/dNcd/8NDKFeZKqTz7Z8Wmk58ulyZJrMxUEH+HjLkS1Hfi6OLMvD1Fs5ruXIL58jT5VHCVcSnaJjALYCJCClhFOW+8eOLLDvdDqNrUK5Vu9w9REkhgiI8S4DhEBuLu3xWBQnwElRNk5CrrxG+tgNZrFYpDQCInIYB3nAkJqCvhoIj3yJ0tBvdjYCFIhwQVIQBwatj+XJXe33+ymiA1n3+/3ozA8LjNlKFCOOiPSSPxx1uVymnOnJZBLn5+cpisGjf/LINX95t9ulRcE4G9GkzWYTf/iHfxjT6TTJkagQ0TnkivNzLToDXG9vb2O9XsfZ2VmKBgLwjIGc8MvLy5STrLZGW
8gOPVxdXaWJxWQySQTDjkzIkfQC7Bryoe/oApsimsvkibFQP3nxTH7wqfV6nd6XglzLsoy3b9/Gb37zm9hu93nuTr4KGAD2brffoenFixcNooC8Ly4u0uSBFAtsFn8lRYGxkLN+d3eX3mvy6vI3Ef8toiiLlEfPblbI67DO6zBZgsQmk0l6v83d3V0aCzZHSsvt7W2yP+RKlPrs7CxNjGazWZoUIGMImokQemIbYdaOINu6rhtPjyMiyUNTurBV+oO/6mQX3OF6j8pHREo50kggddEHwD6XEqYRNz7r5EInGdiNRhU1igi56Bja8uml5cinyZFMHBVfyrJMa3Bajmw58nNwZNk92B07JrYc+Tw48lR5lF0KFbARIh0gtxpHj4h0p6pRAR4ns5vRZrNfSNnv73eGAVS4idD0A4xA28bwMDp+RzGqFMBrMBjEbDZLBn1zc5Ouj4j06JbcbAogERHpbhlDHQ6HcXFxEb///e9T9A1gqes6RV0gGPKrVYGAPiC53e4XnkI2OP1ms0mECVmV5X771vPu64j/NyLkbpwoEYtM0RF53+xQwyQaRyHyQc45uup2uzGdTuP29jaREH1XsgbIIyIBZ7fbTTs7QaT0n2jedDpNUS/A8ebmJjqdTgK4s7Oz9PLK5XKZ5IBNkZNM9Oj6+jr1jbSOm5ubBIQ64Tk/P4/Ly8u4vr5OwIGtRER89dVXKfpKPjS2wHgA5bu7uxSpwj9IOeAcjvPOkuvr68aLMDn+1VdfpVQHdArBAA6z2SwR/X/8x39EVe23gKUP3W43Li4u0rgYE5MZjdYRKYUs1+vDe1VIt1ku9qRL20S6selOp5PkQT+x8eVyGbe3t2mB8/v372O328XFxUXyC2TL4madXIIXX3/9dUpbUpuAGIh+Y4c8USDPXuvUxbgaSUTe+BQRN7ARX4qIlCKjE1jIgz4hZ2xAJ5eQBrrRpw76pIOCv9I2uOkkohNwf9pCfxlne8P180vLkU+XI/v3/bt+9V/i//hf//eow9Zw7L8cJm5R3B+MPZeWZUS9vyr5zf01ZVlGvdvtf7s/tr8uDnVkikbky+J+17Sopf77i4uHL1wtoohdLWsF633Qi+vS+OiP9uV+HHosTSLr+4Op6aIhq91/bU5y+cx5/E8YI9dHLbtEFkWSt9aldTb6mrr+8P2AjL8sy1S/9x1ZNK6uI8qqjHpX30ueTVXq2Fm/vD8P6pK+1Ls66vr+ybBMzuGzliOfB0fmbJHyq2+4eBwN0KlAUIRHDxA8IDwcDtNEGwPb7XbpBX5EgDqdTtzc3KTcS72jxBgjIk38iNiRp6sLJbX/EMl2u21E7oqiSHfPvOOBu3EIMgHjPckQSeMY7yUilxjgJyLw9u3beP36dYpCMvnnHIg3IpKciUBBwkREzs7OGtdcX1/HaDSKd+/eRbn7Jl5ERFlWybGVgLmWvGUmzkymkT+LK7mpQO+84ZxIELqk3+RX49xledhCVSOUkPBms0nRSnKt7+7uEhnzDow//uM/TuTM2N+/f58im+Q0TyaT9I6Ju7u7ZD8XFxdpS18ilkyCuKHgUTMTGiYi3BiyIxCO/P333zdu8NCpplEQzSQlAVuMiHSD1+1246uvvkpA8/r164iIFIHjHRlEMH/66af0vg/GwGSo1+ulHPTRaJQmdy9evIi6ruP6+jqlTaAfIpXq0xGHd04QsSqKIu2AFbEHrsXykPaAf5FSgD2ja84D7LGLb7/9Nrrdbpyfn0dEpIgbKScQ0t3dXfIBJl6sBSBapaCP3TPR1acOyI4bT+THjmvUga3e3t4mHQP67EqFHZPugc0RIIDgdF2DkpESjGIe7TNZRs88CUB+jAu/0wi74h9jV1vUm3eOER1uy88rLUc+XY788eZmL5uyitXw7L+LPTzL0sLCLy74dMuRz4MjTz3p+tU3XDxm0w4CtHQIQTA4Bjufz+Pq6ioNkEk2RkV0nBsCiIh2Iw53yBjZZrNp5EAzyeVFjfSHnZQiIi0YJI+YyT8TVcgBotMx8hkFowjyTzE07rIHg0Hc3NzEy5cv000D0Q0enS8Wi7i4uEiPW3lHBm8RJ41E0wh0u9nNZhMvXrxIj5uJEkREWtirW37yQj+eRL169SrevHmTno4x0RoMBnF3d5cm7Dgwd/dMrnWhbFmWKSobESntgggFT6F4csJ46rpOT5VYlAyZcv2rV69SNHe1WsX79+9juVzG7373u6Q/IjHckOm7OGjz5cuXMRgM4sOHD3FzcxNff/11eiLEEzVypYmcUrhJjdinZ2hUqa7r+OGHHxq66vf7MZlM0s46TKqI6Eyn05Q/PRgM4vXr16n+xWIRNzc3cXl5mXaXms1m8e7du/S0ivfEAKzohV206Bf9YbLEE0YmX/SJCct2u427u7t0DEAEMK+vr5NMut1ufPf9D/E/xoEkNOWiqqq0CJenf3Vdp8ke/k1knptzfA0grev9DmaaesIT2ohIN/zz+Txub28T7tAnblqJ6HGt4xnH1O+5IWY3Nuqg7bqukw6I2lEPtgxugY88hdCnzJBmWZYJUzQNC0ImwIDPkTZCYMQJk0I9njIBjoF3jKctP7+0HPmEOXKxiP/5f/tfolNVMZ5MYjgYxGg0itvb25jOZlGVVYxGwxjdc2QRexyeTCbx/fdv4uLyIob3vlpHxHAwiLvpNL569Srm80XcTe9iPBpF3D9VGQ2HUdw/+drrcBVlWcRgOIzFfB51RLx6+TLe//RTdKoqRqNx3N7dxnw2i+FwFJPJJDqdfQBwvdlEdZ+BslqtYjqbxfZeL7f3m0BcnJ/HixcvYrFYxHQ6jW+/+y4m43H89ne/i6qsYlfvYrPe48ZiuYjzs/MYT/Zct1wsoijL2G420el2Y3a/Y1/V6cTre46cTM5iNBrG27fvoigiOt1u9LrdB09wzu7fZUY62vY+AybqOn4vHNmpquj3BzGejGM+m8VqtY5OpzpwZF3H9P5lyQQBvvnmtzGbzeKnn36K+WIetzc3cXF5GePRKGb3mTHdbjcuLy7j3Y/v0tOObrcbnftgatXpxG9+85tY3NtWHQeugVeKoojt/bXL5TLmi0V0qirKqorlYhHre37rVFWUZRXd3h7/o65js9lGp9uJ3ZY1mEV8++3/t3/6NBi0HPmfhCN/9Qpo7vroCHfE3GVyjLxqCKaqqvTOB11wB+BOJpME5Pyud5MIHadjog8JoEQm/CgWxyE3fDKZxGAwSDcO3JVj+C9evIivv/461UuEDMX+9re/bWwDSl447+DgKdBoNEqPf1+/fp0m/KTzETHgTpxUOYyoKPa7+jBWnlAgJ9VHXdfx9u3b+Omnn1IKYFkeSLhTdaLe1bHb1bFarqPe1VEWVey2u+h2erFerePli5exWq6jU3Wi1+1FEWVsN7vodftR1xGD/jDGo0ms15uod3WMhqMoijKKKKMqqyiKMjpVJ4qijOndLCbjs4g6YjFfRL8/iHoXsd3soio70e/1Y9AfRr3bP7rv9wYxHAyj3xtEp9ON+Wwes+k8Ioq4uryKy4vLuLp6EednF7Feb+L9
+5/i5uY2Xn/9OupdHd1OL6qyE7e3d7FcLKPe7QHvu+++j9///oeYz+bR7XRju9nFYr6IqIsoiyr6vX5cXV7Fi6sXEXURP757H71uL87OzmM8nsSLFy9j0B/EbDaPu9u7iDribHIW2+0uyqKKquzEZDyJy4urKIsyLs4v4/zsPIaD4f34iyiijKgjRsNxXJxfRrfTi35/EFXZiarqxGg0jl6vH0WUURZlrJbrGA5GcX52Eb/5zTcxGo5itVzFzc1t9Hv92Kw38f7Hn6KIMv6H//Jf4/zsIsbjSRRRRr3bP9HsVN2YTWdR7+oYj8axXCzj/Y8/xWazjSKK6HV70ev14+52Gpv1NkajcVRltZfbehvbzTZGw1H0e4NYLVcRUURVdWKz3sZ6tY5BfxC3N7fR6/Xj7Ow8NpttDAd7IK93dbphJYp+fn5+P3HopIhTWR7WTVxeXiZ/7/f7MZ1OU1Sa83nSqjsqdTr/P3tv1mPJlVxrLvczz/MQkcwki0SVAF3osQH1j5PAC5TQ/+H+HAGNBvSiF5VuFWcyM8Yzz7N7P5z8LOxEUVmVYl5AYroDQUZGnPBhbzNb5msvs522VrQkjQA87BpkAJr/0WhkSaB/WSWGeCLAS6FSqZRWq5WBCJ/h7wjIJJQ8K2wcCTTxRpIxcUEQWEIMWQL7xnNJsmSXv+dc1AHwM4DgOVD7ewTMiHun02UrXB/Tk+P9jgQj/2tjZCaQyoW8Dpu1fvj2G02GA6UDSceDdDrosN1Ix4PSihUfD8qlUzpsN2o3ajpuN0oHUi6dUiqOFB8PyqdTCqKTirmMKoW8TvuddDqonM8ppVipOFI6kMI4UiaUUoq1XS5ULRWViiPt1isVsxkF0Unxca9sGKiQzaiYyyiMT0opViGbUTmfUyGbUTYVardeabdaKowjteo1tWpVtWpV1StlnfY7TUdDLWdTXXc7CqKT8umUsqlA6/lMx91GYXxSfNjr4faNhvd32q9XyqVT0vGg/WZt91zIZtSu19Ru1BXGkaajgfKZtOqVkqqlorrNhkr5nHarpTaLuVJxpHq5pCA6KRMGyqZC1coltWpVZQKpUa2oXi6pnM+pVioqjCOlFCkVRyoX8mpUy8qlUypkM+e/DwNVigXlM2mlFCsdSKf9VqV8VvVKSS96XZULeZ12Wy1nUxUyaUX7nWajoVKK9Pmrl6qXS6oWC0orVhCdlA6kbBhou1woiE6qFAvar1eaDgeK9jul4kiFTFr5dErrxVzRYa9SPqe0Yul0VLTfnX+Wy6qQSeuwPY9nJpCi/U6n/U7FXEbr+Uz5TFq1cknRYadiLqtauWSqoAQjfx0Y+a7jg9RwSU+7O8NUMRDc5HPdo19yZCM8ChIJoLwhA1ic0wdUBt8vu/rB9VIOSQYsGEGtVrNAjnSNQM25MBokZM1mU5PJRGEYGojxBk59EAaAEUvnpePZbKZaraZ2u60oijSbzS6YPt7GvUyN1R5JxoKwqtFut00SRvGpX96Vzl2Aym+VxcXb/6Hf3P6PXzrt/6mj4b7Pv/1/RlLJ/Tx6+8Uxf/sZBGtrPakXKCctuXPkJC3efu+Nu/L2S5IOb7/4zMad5yTpVmcmIv/2e46V+z54+2//s+fHxp0nentvsaTls8/hoqm3Xzn3u+mzzzaf/fuab/7f89iU33696+j+Bz9/6b5v/4VzPD/67ns/zwpkTBKSJnz4cDhYa10CPl2zYLXiOFaj0dByeR41/AtAQF6A9EB6KqSF4QMcKCxmZQCGbDQaqVarSZLpzv0mkx5AkH2QNCKllWT+TkxCguXrnvDPMDzX8fkEm+swHv7efS0djCasm2fZAD9iGj8DmCXZuDDGz4GOpF3SRezi2ZLj/Y4EI/97YGSlUtGLFy8smWLlzSdXxCGaNxyPR02nUxWLRVNy8IJL0kuiSzJMcszPuX9WGlk52O126nQ6Vtsbx+eW+8w7q3uoJajDlZ5WHF6/fq2XL19aY4vtdqtisaiHhwd98cUXGg6H1vyA+EuMoWaYumjuGdVHGJ73utrv9xoOh/Zizj5j0nnPMezBx6PHx0cVCgUNh0OLg8Rv4h9yOFZmaAcfBE+118zBbDZTpVLRaDSyldD9fq/lcmnjvNls1Gw2VS6XrRW9j3Uk5j4uI9lF0rvZbKy2nNpwfJZYTczfbrdqNBpar9e2KfTV1ZVubm70+Pho7fr52wQjZfHrvzNG/h+VFHIhAi0SAG6QYI4RwaJJsmVUScZ8URjsO7V42YUPTH4wvJYziqKLc/B2ykDCKPKm7TWkvmsOzo3sCkemmcJ8Pr/QsyPLY9+Kw+FwESBp/ToYDHR1dWXGyj0RQKixgn0g0KZSqYvAdzgcbP+IRqOh+/t726RwMBjY3hapVErH6KBTuFMQnOtfgwAj8zNJYSmln4H7fPBWJWAVtv+BNfxM2WjsSk+Dp3OTWJz/f3kO/29/j4EoAmYPBp7hsgCXa3E5X3TMmc7fP/370lGexoE/+bNrWSnt23+9vVgcxZdF0U9V0/YDxvTpA5fjxxxdjsHTuEmugNud/+kZ355Dcv+J7VxP86CLa/H905g/FRoruLwPPw7+s/xtEEhx6qlNNS2DR6ORMejdblez2cykrcvl0uo5FovFhTxyMpnYfiUwZSQK+Xzekhy6oVEs/PDwYLGDlr0EfEkWAwAKpLHUo+Df+CixiOQjiiINh0Nj4X2tE2yYP5BpUiNJjIQRDMPQkr3NZqP9fm+JG0BIssp5SAphEr3sgXsGfDxweOaQBB+mkHOQAJLwJ8f7HwlG/tfHSGRmxKlisWit2Bkznl2SyRp5AUSyTqJ8OJw7BOL/jAMvaxw0g0I6Tvyi8QgvVbwk8DLh5V3U7sDA7/d7lctlux/k+STlcRxrOp2q2+1a8s0qIysHjUbDpOK8MGKjJPV0yOPlnJd1pF7Y++3trcUp3/AEuSvznE6n1Wg0NBwO7Zqz2czsE9wgDhKjsHMarRDfiWPUDbGH1f39vd0zOJLNZq1hlq8RLJfLmk6nqtVqVg4gPa1oMA403uJFhhUi5qVWq2k+n1s3T/wVjEsw8un474yRXo74/PggL1wEOYIzS3XPpRR+uZE2j7Q5ZQJ4a+VN3mu3pcv9Mzwr4pc1mVCuT7BCO+qL42jw4Fks2EC07mEYWtMHGjTU6/WLeh1qlFjGpQUvzwsDBUBJsmLndDptOnzGkzHzzwU7Rce7bPa8k/xsNjOHrVQq5jzr9dqaLizD7/X1//X/WHAmePi3/91up9lsZqDC5ofH41M3LLrgwFzx98+ZB79Mi34cBo5mEuPRSPV6XZPJxGwnDM/1QeVy2ZaUYU7T6fRZ4/72mabTqa5evNBisbC2sYfDQZu31280GrZPy2q1Urlctg6D1AnAhubzeXU6HbMBmC0A+XA46Pr62s5FcIRFOh6P6vV62mw2uru7s2BFspROpy+6IDIH/jNhGNqmi5vNRtm3bMvpdDK2rlgsajAY2JwwRrDIzWZTo9HIgn4YhmYvsFeACGwQ7Yc9k7hYLCzBxhdhwBgbdqvHB9GAw8gCWpkwY/ZBrcV
sNtNoNDIfgNHKZrMXdWWwlyRPxAESTe7RB0FAcbfbWXJDkgKw7nY7Y3PxVz6DDxITCKr4J9eGZUbO5OMIKwYkg7D7HF7//ly6gM8TI6iZYUzpHupZP74AfM/icX/+3p/HU4DSr7R4RhDQTiSF/7kjwcj/+hjJfFD3ywoGL7C8YBIneYGSZPGYOO+TLi/39HGEuFQuly8wElwtlUq2EkIxPnOWTqcNP5h3ksvj8WjPlM2eu8kSd0ulkknIptOpGo2GrUTwIk+Cy7Pz+Xw+r0ajYbaAhI3YzQsrLzfPMfJwOBhG0n6cBJzxwIbCMLSXPe6ZXICxpLU/ZAbY/i6MBKPIJ3hGSAPm6ng8Wq7CXLMBMjYFRm42GxsDXjJ52ab2jJc5ru9XZ+j4mWDkrwMj33X84hcu3vji+Kn7BwHTO5JnIJgcJgD5AsFwMplcGKnf64DAQ9BmEkjuoihSs9k0RgU2jDdbmD4fpLl/Jt6zLn7pWDo3fTgczm1laUzBEiuDjpHTMY4A5IM0DpBKpS6Kf2lggWFwcH8Ec5gtzyzsdjtNJhMb83q9blp5goOfM4wFQ/VLtz6QUkPAcvF0Or3Q/2OEfI7rw6Kk0+eCVF4kPLOKFnmxWFgxMvUBAKeXruz3ey0WCxWLRdPm83KWyZw3YMQeeDGqVCqqVqtWSD0cDrXb7UwaEMexdfnxLx4AFWDiX0D8c/P74/Goq6sr3d6ehYjlclmvX79WrVYzVtuzzQQjX4wJO/zceU+nk+r1uj3Pfr+3YmmCxXw+t5fHyWRiYMPLKMGZvWM6nY6CINBgMDAbYux5fjqfxXFsbBq2h9zAv2zX63Ubl1qtZqCJxAJmD1vCtqXzC1sul7NCXewU/8SemTPP6BNrGDfPTJHMwVYDdDwzCYYPnn6lgXnHFmFGpSc2Gnun0Fh6ajPrQZB7IUnAJokhJFuMC4wyCQYMPPaD5v95IswYPdfmw17jT9yjvz8Or8knifhLgJIcf34kGJlgZIKRCUYmGPlxYOS7jl/cNEOSsSi8efr/w3DwJumXKjGw58GMzzCoyBcYKCYYI8IBYPBhX9BKExB4o8ZIMBqW4GENYAa9vpXBZeKWy6Wur68vzgFgsQrCxOHsdAYEcHibhxVjAzx+z5JotVpVsVg0qQLLtH7iuUdWNTyrAKPBs2CYrN7s93tjBNBNE9zY+Z153O/3pt+OoshaqhNQOL/XnTNHzAuaXr97OZpj6RyEuV/uMZM5t/RlCZzzSTJ5DAEMO2Elzp+DwEhCQhIAw8OyvCRjOpGkMLdo3mFIJ5OJVquV/v3f/13L5dICCnUUQRBYstTtdm2sCEQwdwRxxotn9K3p9/u9Go2G6vW6Fc5z71F0LiTHBoIgULlcNlYSdi6bzarT6ejVq1dqtVq2LI+PsGpJ8oNd4QP8PAgCA+R6va5ut6swDK343W9+SrBj6Z/z4O88O89IApHL5VSv19Xv902Hz3zB6Pnxgq0iACNhQv9fKpVsTPg77yuMIzbimTsAli/m1SemXlLgVy687MDfL8kDDQS83EZ6kqLhuyQPkgxYvQSMcfPxAwAi3gBezxN+bIYDAOIeGIPkeP8jwcgEIxOMTDAywchfP0a+6/jFK1zesFh69/pMBtcPLI7Ig/IWCwPC0iVSJ/9mikaTwaDjChPNciETzkRSKDmdThUEwcVbMvdKYOGtXjoHFLSiURSZNI0OSSyXw0Z5potz8n+ChnQuYJzP51aAikN5gMCIAVsAsFarmaGwDF4sFk1P7usDGG//Fs8Gt8yZZzrQ+cLQISVIpZ46ZsGkMkeSNJvNFMexOp2OMYnoh2lviqEimyCAEdCWy6U5DS3oYa0IPLClBGdYOuYCWQIAiq4X5hC9L5I6WFLGkHkm0SGYYwveSSnSzGazevHihe7u7rTZbFStVo1FZMw4N8XAkqwAF3YZ8OP8jDN2PpvNzEb97wgk/J7gDrsK20fgQcf93Xffaf52Hxr8EP9Cq01y4Bnz1WplyQefqVQqZi/NZlOVSsVa4sOMYpckDx5QfMBDHoCUhMSiVquZNt6z7dI5cHt2ClbMB+7j8dxmu9VqGQMMAAO0bORKvMFn2ZQbn/KMOPeQTqdVqUvlvewAACAASURBVFSsDgB2kXjjE21kFSTVXg7h/YQg7gGFMQP0SE7iOL6QbpDk+3N4SYR/eeIZfJLO54lxMKnJ8X5HgpEJRkoJRiYYmWDkx4CR7zo+SA0XRkLg5o2YQOXfCLlpX//jAzHLkgR0vyzKwCO/wJgJbL7zDMyBJNvLhEEicfBvylyDJXr05ofDwWQgBD+ebTAYGKvAz6MoMiaKt990+rz5GwbFMud+v7/QZ9NtiAmUZF1cMEKYPJa4Ac3dbqd2u20BmOujyZXOgQ2AxfGpJWJvK8YbPT/gEcexut2uMSsAHYENSctkMlEmkzHAYj5LpZLm87kFJOarVqtZwGKMAQXsCsPebDaq1+vGxsHA+Pag1J7lcjkrRi6Xy8bO4kwkH2h5keysVisLnHSskWSaZ2yOgEsBahA8Lce3Wi3byR0751yADHYBIHn2BZuTnpb/vQwHRg1n5/5rtZqxaYAugdRviDwej5VKpazou16vazQamQyCAIw/SLrYmBEWm2cHjBeLhbHa+Dl2DEBzX5wTqQsyIvwDOQ6M7XQ6vYgl2CRMM77KuUkGKTznb9h3hFjE2JCw4m/YYyaT0Wq1svsjgBOksQXslaCL3RP8uZ7XyMNO+/H20isv38DusBWeMYoimzMvy+DfnpEjhhJXib983oMWPycZ9WCTHO9/JBiZYGSCkQlGJhj568fIdx0fRFLo35J5u+XGCQQ4wM8tJ3rZgHS55H86nczInv8NMgz+7Q2oUCjYwDDpSBG4DpOKY/t7TqVSpmkm4PguO2wmh7yAyTscDhfL8Eway8SVSsWWu3Ec2L/j8dx+k8nFmAHnXC5nrWwxfknmoCx78zvuAVaG+4QBYVkZACRgwPg0Gg0dj0eNx2NjVx4fH20eT6eTjT8gB0MFGMOKEPTT6bSq1aoFTw8cOCl6fZ9QrNdrzWYzez7GerFYXLCIOBOghe2wDwX3jGOWy2UD2VQqZawgLVCz2ax9jyPSvYr7i6JI0+nU2KFUKmUgQkLFPMPm8fw4u/+ecctkMgaAz/fCIeHgK5/P6+rq6oIF9iz44XDQarWysQNokVL4JIQEgnHj2n7JH1Y6kznvGTIajTSbzYypZZNkui5hPyRLPD9jSUctJA2efcIekacQS7wUATaVxEySNVshSDN/BPBs9txdKpVKWeGyT5AABeIbiSbSLECe2MD5mSfuEekM8YnYw/jg24yvZ4l93PM2yM+4N/8z/zkv4WLOfLz05+f6+Ab261+2fNxJjr/+SDAywcgEIxOMTDDy14+R73rx+iArXARNHoDJgOHBAP0NYwB0x+EmYUSeTyx/w8+YDA9eBPbT6azp5vzIHDBaGD6/1IvsAHbC/75eryuOL4viuD/fhhJQ5F54Q4eNgKnj85499IwYIMrbv2dhjsfjhTyAscHwPTvnDQX2C6bCX2+9XqvRaB
hLSSEtAAc439/f294qPFs+f94JHKeDHfFMGewiz+ABnODnwYQuMwTN0+mk+Xx+3k+sXFa9Xtd8PjdJB78HoNEkw5ZwH+v1WtvtVs1m056PQA4IAIieZaMzlWdJYHmeMyeZTMYYJeyBz8K8UDAKeyM96e0pwmUu8AdYSWQGdAokWZnP59aVERYLKcl6vbaWwIfDQVdXV1Y/4TtG+XHmnryWnODpgxL2GwRnSQes7XK5VLVaVTqdtn11mHPYrlQqdRFkkSOQ8JBUYEskVdiGZ5tIpqQnlvru7u6iiJUY4u0KnwKkAREA2Eu8fIJAxzR+79lY7Jv6Fq7t4xa2gxQLP+X3XgrCffpEmPjK8/A5QJJx8Ywj98u9MO4+VsAaejkVKyYwmMnx/keCkQlGJhiZYGSCkb9+jORzP3d8kC6FBDVYIyaEgOxvgOAnPS0F81A+oDDJ/m95KH7mgzEOTfFvsVjUdru1tpeAFpMHmHk2h7dwjtVqZUwTwdizZsg6uCfAhsAFI8ObuWcCYCYITDyD36QPI+f+uDabK/o3dYzg6urKnpvuOX6pHamGJAvymcy5cxFsFOcEdLlfltlxPGQcsG/SU4DxYLpcLpXP59Xr9S46LeFwBBEChj9XEJw3H5xOp1Y07YEKFo+/gcmElQEkCMTYqqQLZgxARuNMq2WWnKWnBAJWCDv3S+LMg9df++V1pCHMn08sAGYPdBS9TiYTky4wLiTABNhUKqVer6dWq2WbJeInzDfP7tlvmGUCvfdnOj0xFnxJT2yOD54U5cMa8nvugzoNEj+fYPp5L5fLxjZyj+jVPdOZSqUs6UmlnjaJLZVKmkwm5huSzAcI/iQ6/I7xYp44CPJhGP7ZRrPYhAcL7IDfU8T7/DyetfNsuGeXOa8HB+93+JsHC796QozyMYD788k45/DxVJLFZ35OgpQcf/2RYGSCkQlGJhiZYOTHgZHveuH6xZJC3tq5iL8hgo53RowgDEMzWhyPQYZ184bv9avPjY9zAy7NZlNBEFhXE7oV+X2nWOaFYYA1IhCxbOwHH52qX+5Fz8xnvKFxUFDrO6fgCFEUWTEv7KJfMvXdZGD0GG+ACudGAuE1tgA2zF61WjX5AsvTFDtS+MheGN5YcRrGkiDGfDKHjNfxeDSZCQxVsVhUvV63651OJ9M449QwJOl02paZYYXo2HQ4HKz9KUG1Wq1aMlitVu25sBmCF5IRH5ABeezWF5RTFHw4HGyZXZLW67U5Oywm5yUJGAwG9jO/ZwpOTDCDwYrj2Ng2WGdAjv1RptOpadfRwmMTuVzONPSZzHmvj3q9bgXByHVoqdxsNtXtdg1opSf5A4XU+B3tabFBAh7PAlM1Ho8lyTpEAZwcJGT4CcBNET3A32q1TB4hPbFf+BbnwHeLxaLtJ4LUgPnHfhlPzgf4wLz5oIw9B8G57oDaAWKN15djF/gr8hDG1QOmfw5sg8QiDEOzT2yR+yEZxeY84BH38HtsFlv2gAGweIAillJ7Ij215uVzAJ1nQ5PjrzsSjEwwMsHIBCMTjPw4MBL//rnjg9Rw+eU+BsrrIglqAAuDyGeeMwosDxN8paeuPRiAHxwG3Ac8DKLdbtvu3H4gCICACfrZzWZjy7Ee/ObzuV3DM45eZsOz4BDs5QBrwXUJ8Mfj0TaUlWTX9ku/jCHAyZI7Rkx3GxjLyWSix8dHrddrc3yMIp/PW0DGGeI4NodmPCXZPgqLxcI26xsOh9biFekEemE/Xz5I39zcaD6fq9/vW1FuuVy2TQX5W5yPwEpA5NkovGZHd9gmOmvB1GFHtAjGEQEvHJE5hHElkK9WK2N//BwyZz5wwJB4xoUlc5gw5m46nVpgee432FoURRfJAwkStkoXLQI37FcYPi27L5dLLZdLkyfEcaxGo3Fh84wHHYQAKuwYIAeUkWeQHMCGw8B6lpE9cBgX2Gh8GF/1gY+AdzgcLljvRqNh9gU76IvUJRlzCGBh0/hQLnfuysZnqIfAt57HLb7nXvEr4hUSFZ8YRFFkDDT2x7MjN/G1BPzeB2cYOR8jAUq+eHYPjEEQGIAdj0fzWz8HxA3GjfthvLBR/MKPoz+Ht9nkeL8jwcgEIxOMTDAywchfP0a+64Xrg3UphIki6Pk3TwaPN8UgCOxt8fmSHZOE0/KWTZEghgtwcDA5LIFLMoBgYN68eaNut6sgCIwdQTctyUCFCV0ulyoUChaY+R0TQ/tUjM3LJtgwbz6f22Z2nIMiSb9EPx6P7X48+8d5+TlabsbbyzwwxtFopO12q2q1agboQRqN+mazsXNQ0EsXKALr6XTuwrNYLDQejy3oMsa73e5CCgAY+BarzWbTmLBMJqPpdKrT6dzKlkASRZExcpxzvV6bphpggD0FSAAtz0otFgtrk4tTY19o31mupr4AmUQcP7UVXSwWqtVqpt9Gpx5FkfL5vLGlPgB5rX273baAyTJ/Pp+3xISkh4CADZIs1Ot1Y+rW67WxSGx0mslkdHt7a/eLvdHpqlgsajQaqV6vm83wfPl83rTp3o8kmcaaxIVkEN2zJKt1INjA5sIUSzLAg50KgsC6XJHQeJ8YDocqFAp2/81mU9vt1iQwBFE6hvkgSBE+cYVn9ckXz0kw5d4Jvj6R8geMo28FjIwmm82qUqlosVhYbQZdn2AuYT+xEX5H8AYAAApWEfyKBfdN3OOefGJDcs6zA0aMk48XfsWB//O9f6nCN6UnwEuO9z8SjEwwMsHIBCMTjPz1YySf+bnjF69wEag9m8HPPKvB4PBQXotKsANkpDNAYYQMnGc7eJvm39wLXWDoqIKkAUZxsViYVpN7YoK4X4oqYTHq9brp3llyPx6P5lC0DsVwABdfeBxFkTFFMJosr3JfOCPL2oArbBfLrxTo4jz8ezAYKJ1Oq9frqV6v29I95wQIYVxOp/NGf6VSSa1WyxyFsWfZt1gsajweG9PI0nS/31etVrNAgSECQNxLt9u1cUauIUnT6VS5XE6ffPKJsQm+iw6sBPebzWatzS22RODCjpBQYC+wO2j2sR3pSW9+OBysjgFHzGazGgwGenx81OFwsEABCw2gYWOwoz6B4prc2+Fw0Gw2s2VsQIagRktlgACpCLbii5Cz2axtWEnQKpVK1i3qp59+uggE7XZbqVRKi8XCgm+tVjPfSKfTxnCTMCCxgcUFbBmfKIrUbDYtsfL7mvgiYwDpuXwKMAHomGuYPLpEef8meWAe0PR7CQ8F18fjUZPJRKPRSPP5XIPBwOxisVgYgBMzfID2bBySjXT6XDeCHItGAfl8Xv1+X+12W61Wy/b6mUwm5kM8l/TEopLk+JhG3OQ5Saj9F3aOr3mZBuNJ3QPPge8Tn4lB3BN+wjhLly1yAZt3gUly/PyRYGSCkQlGJhiZYOTHgZHvOj6YpNAvb/PQ0lMLRh7cL8F5Xaa/eb90iqHz914T7AHhdDrZhNzf39s5N5uNJpOJ4jhWq9Wy5WwcDT02y5iwDYVCQfV6XaVSSePx2DbgA8AymYztQo6DsLGdJGOZaLXKsitjheFst1sdj0f7TCbztFcEQ
Qm25XQ6GYPmg1qz2dR4PLYl1kwmYzKCMDx3/CGwSrJAgdMTNOhcFEWR7VYvnQP8bDZTvV5XJpNRp9NRuVxWq9XSF198YZITGLZisahOp6NGo2HMHewErCDzfnt7q/l8bgYL+wVDEkXndrJoxgHczWZjmvFXr15ZoCbQwNDBynNvu93OWBRvT2iUAeZMJqN2u21sC3aG0+52OwNd2tjSrhV2t1qtmq0jg8HOU6mUlsvlBSB4KQf3g89IT8E0nU5rPB4b04otcF720yFxgGFm3xMYIuQ0MF7sx0LwhnmGlRwOh5rNZhqNRsYUDYdD3d/fm+9htwRFGG58luV6gIY6AoLncrm08VosFsa+B0FgbWY3m40Vh8NY+6AI2JEo1Wo19ft9RdG5Exufg8nCFkhEkMlQS8EXNkSsAHzX67V6vZ6KxaJ6vZ7JVgBLEh+embhIUsQcI/cBvPgMCYq3gzAMzZb9nCH14VmiKLLNPolT3AuMLDHLvwj42E2cYe6S4/2PBCMTjEwwMsHIBCN//Rj5rpeuD9bjlwvzxunfuGGR/Ju7l1ewPM4yZaFQ0Gw2sweEKYEl8Ut8sCKeodrtdprNZjb5sGbS01KmX17k/tPp8w7z6MRbrZYmk4mOx/NmdL4VaLlcVrvd1uPjo52f5/BvuxRSZrNZa7cLw0BAxyhgvVjihYXYbDbG5LDrOQGC5w7D8270OCUOh8HQMtXLK3huv2+HJHW7XWUy50Lq6XRq165Wq/r2228vCjbRiqNVpnj3cDio0+koDEML8rAmsDxRFKnX6ymfz2s8HqtUKlmSAfPg5zWfz1srVb9TOfYBA4sdVqtVAwCckU5F3jHL5bJdk93RJVnHI5gt2gJjp9gmBZgw1QQ0/p5uULCjJF2pVMrYNtgtQAAQw2YBEiQE2AG+Rccm5jCTyejx8VHValVxHGsymZhPetYQezwej7Z/DfOORABJQ7FYVBAEWiwWmkwm+vTTTy0BIUlrt9tmwzBCSH0kmdaaawKw2BuJTxzHBnAAH13CUqmUASm6c7T7MF9cEz05HcAI8kEQ2N+QPMC4Y0uZTEbL5dKkW1wPwCNZuLu70+3trTHmo9HI5DsUdxP7AHySBuyH36/XaxWLRWN9iYs+8fG+SswB7DgviWkcxzamPqkj9gIkvt0wYE+chYFNjl92JBiZYGSCkQlGJhj58WLkB+lSiBHzVumlBzgYb6jcFBIJBhYNOM7kB16SGQ8MDwwWAyDJWCiKJ0+nk+7u7lQsFs2hD4eDOQWMBxppvkqlki3Xs7yfzWb12Wefqd/vm777/v5ew+FQmUzG5A2wjkEQWHEk4FKpVFSv1zUcDm3SYBUI8rCJfpl4sVhoOByaPIOlZJ4nDEML4AQDNuvzbCngy9hhNOPxWG/evDHNdRAEajQaevHihbFZ19fXxgpSnPjixQsdj+c2wujb0SwzFshW/PxIT1rb0+lkLAnsDDvWR1Gkfr+vq6srC6IwiBS5Xl1dWUD47LPPtFgsTKrBs2y3WwM6mChJ5oAwbsfjUfV6XfV6Xf1+35a+qY1gc0uAaLlcGrOH3p3f+9aoOOTpdLLuRoAH4HI6nff3mM1mdl4AA1uWnhi8arWqq6srNRoN9Xo99ft9C2YEnvl8bvrpZrNpiY3fBBSAIyGCgST5wUd5ziAIVK1WTV4Di8uzMM+SjJnmgHn0RcnECAJ+GIZWpzCZTDQcDiXJgt9qtVIcx6bR/+1vf6tOp6Nms3kRcEkiYIzn87kqlYra7baNg3RmKJn75XJp9+slFUijYOBhQdGO12o1zWYzPTw86OHhwVhdEh3kEjBrHD5B9qsS0hNzN5vNLsZ+u91aAsBzsBoBoBNbYZrL5fKFzAUwR/YF6CLDIOaymgLDR2xPjvc7EoxMMDLByAQjE4z8ODDyXccH2/hYkg0IxsEyqfS0LwIaVF9c6IOPNzi/TAfbxUNxXpYYcUiuz98xMQT5drut5XKpUqlkBkGXF36GDnu5XKrX60k6OwYbQ3Y6HY1GI2OiABMCNEGFnd8bjYbq9boZa7FYtGDhJSP8fafT0WAwMJ06zwYTyr3CPPliRwCSJdPT6WmHdPYm8W/wktTpdDSbzTQYDFSpVKwgkWLc4/Gofr9vzMJ6vdZut1O/379gTCmYxCApdN5ut3Y/dPKBraAYEv3/8XjUbDYztgGWkk5Io9HI5BzUBlQqFX3zzTe2VM3+GqPRSOVy2SQrBPf1eq03b95YsoCdTCYTG5t+v6/5fK5vv/1WlUrFClaRF8DwIa1ZLBY6Ho8GehRNE2xYXgfsYZEJkizT+z1HttutbWSJJvrm5saCO6xeHMeqVqtWaAsLTFG0T+RIBNDiB0Gg29tbY3ZhcwnsyBO473Q6bTKU0+mkh4cHffbZZyoWi7q7u1Mul9NoNLKNQulmBpsHY4kN4e+A6+Fw0Hw+V7Va1Xa71ePjo0kiUqmUte/NZrPqdrsGwCSRSGJIEGezmZrNpiUTzJtfYSBppRMVKwvpdNrAFTAhoQ3DUJPJRL1eT/v93mwAm0ilUsYek2z74nFiHswocSqKImNtkZrUajUVi0VjUwFrDsbYJwBINgAqnpMmDCSWPAu2ir1wj14ehn8mx/sfCUYmGJlgZIKRCUb++jHyXccvfuHyA+KX7pkwb8iAzOFwuOgygsaXf8M4wcB4CcJzZoTrMjgMBGwaOs1ms2n3ixPx94fDuaUmrB0tcu/u7nR9fa1Xr17p22+/1Ww2MzZjMpnoxYsX2u/31maUjj04Asa1Wq3UbDZVLpf1zTffKJfLqdfrGVMDQzifz3V9fa0oijQYDIyRgPGioBW2CUNB48rSMIafTqdtXD2bdTwerVsRgfbq6sqMbrfb6euvvzZ2jbf6x8dHC6ydTkebzcaSAlgXb3iMbbPZvJDLsI8CycJisbDgCggDMAQuWDdqBAaDgQWIXC6nzz//3JZ8AVu63rA8Pp/Plclk1Gq1tNvtNJ/P7fNxHKtWq1nRKOMznU7VaDQURZEFQdgQDxRhGGo+nxtAI19AakIwoUMYLI4PpiRDvtgznX7ayd7vRfPw8KD7+3u12+2L9sTSOTkg2MOCIhEpFouaz+c6nU7627/9W/30008Kw9DsL5fLXfgRASaXy5lef7fbaTAYWGI0Ho9Vq9XU7XZ1d3cnSXY+2OjHx0d7PoIrrFg+f27FjF0GQaCHhwe1Wi0ryJeeNqcslUqWOPzzP/+zFbTj3/wfOQBjh/8QvMvlsqbTqd0PMQb2D609zwnI8P1isdDj46MajYYlv7Br2HK1WtVoNDKWLJfLGVuIf2GjBG7YZpi/QqFgtrfZbCxx4++YP2wyk8lYly/GggSF2MxnaSzgZSE+pgK4gCyJV3L89UeCkQlGJhiZYGSCkQlG/uIXLpb9+eLG+d6zotwgRs5ArFYrCwws6RGMYKMkXSwBEkwBKN7+JRlDsdlsbLkUUAFMCExoUJFJAAoEisfHR9Otc05YRpg5lirpqkJQ63a75jCr1coKDQle3W7XNLij0ciWutH5rtdrGxOClCSTR3gwZrmZ8SsUCtpsNmq3
2zocDsZmUJjqiwPX67U6nY6urq603+/1xz/+UdVq1RwdMPvkk0+M6ajX62bYmUzGgj4MLnufSDLGD3uoVCpm2NPpVKPRSJ1Ox8ZoNpuZPZVKJXW7Xc1mswtdeKvVMomCJGPLCMw4myRb9vZJTqvVMpkKWmIKfUlCoihSu90+O8rbILTb7bTZbNTtdo1pwc5g3HK5nLGrdPxCc87/uZ9arWY1EH6PFQI/4LpYLKw+guCGDCCTObcRPhzOrX+r1ar2+73u7+8VBIG+/fZbffHFFyYjqtfrNvZ+KR/bk2TJCUnf6XQyhppkYLFYWJJEYKrVahYE2ZsGaQ/zwb1KMsAfjUYXHb9arZaCIFCn01EulzNfIynabDZ6/fq1ms2m6a9JSJA2Ma5xHOuHH34woEyn05pOpyarAbxyuXObXQIvBzpx7MoDFpKGfr+vm5sb8xVsg/bTyFKILdgqSR/1IyR8MLCr1cr08JVKRZPJxP4WFtbHw/1+b8k5sQ6WnBiCHS+XS7Xb7QuGj7hNTCOWc0/JC9f7HwlGJhiZYGSCkQlGfhwY+a7jg0gK/Q0wkLw9Y1j83MsYeEukWw6sD0uEnBMWgbdXAgYHS9wYRqVSsc42qVTKNurzE805PSM2nU5NJsDns9msGTja2Ewmo9/+9rcWzCuVirbbrXWM4n4kmb6bYC6dC56n06murq602+3UaDS03W7V6XS0WCwuOiZhlExusVi86M6z2+2smPa5Lptnh/3C2Nh5fbVaWRDb7/eaTCbGYnJdmKfdbmdA3u/3JcnYKYKFtwfpzALW63UNBgMrTIVZIzBKshahnlXhOR4fH83hYFkajYZGo5FdC014FEXqdDqaTqcmuUDHm8mcN6VMpVLqdrvabDZWdM650XOzoSF6aHauZ4xpn0orXwAJKUYQBMbgsgkl2ubBYGCF6t4+kOkAmDBokkymQ1CiyBTtOTb18PBgcgNkN5PJxPYa+eKLLzSfz/XmzRv99re/1Ww2M3/abDa2iSNz52tOADjuKZU6t/S9ubkxyUmxWNSrV6/0008/qd/vKwxDDQYDiwd030L/jv9ja6VSycAM1jmfz5sEhYTwdDrpxx9/1Ol00meffXbRPhjNNvP+448/GgMFAI7HY2WzWdXrdfMhpD9BENjmlQRfErblcmnMMskH9SKTycRkR4XCeSNLLw3CB0n2YGdh57EVkg2eCTskIUIC4X2NJIXYB+MXBOe9gohvPBvj6pl/GEDuFWaRGErsZH6S4/2OBCMTjEwwMsHIBCN//Rj5Thz48ssv/+KH3nX8/ve//7JWq1283TFoBDQOBuH5zcGWIFvwmnRYVZgXjM/rjRk4Jt8X2SJxoJUtE8eSJFKMXC5nnZdgVQimBAY0qXEca7lc6pNPPjHn420bHTW63sViYWzk6XSybkiZzNMGf61WS3Ec6/Hx0QL7crm0vU54dpbO0dTv93trHwvzAYjDVO52O00mk4s52O/3tm8Kb+20rQVEWTpFSlEoFLRardRoNEwDLD11cPLzjb4fpgLJAGPk9ekEMsYQFtDLaqgnYLkaxomuQs1m08aNpXOA73Q6FxzD0OVyuQv2iudqtVo2jrCLBG/si3tEvgBAEBQIQv1+X4+Pj8pkzl2F2JemWq2aPdNSNZPJaDKZ6HQ6GWM6m81MVx+G4cVyP8GSDj3dbteW77lf/s2B3CUIzoXeh8NBg8HA9hjZbDaWvNTrdWsLC5sH20tyR2BFEjEej/XixQtjjI7Hc0HsYDCwoI7d5vN5q4GQZP7x8uVLq8ugwDyOY/V6PZO1FAoF/fjjjyoWz5uM0gUMoALwYKqQqMCoI1UJw3P3pGq1qlQqZUB6dXVlKwtIGkgkqZUgcYJNg7X3+5CQlCDTaTQaF+wYYwJY40vYEbEUm2KlYDqd2katvk03TDgxjVbFJAE8N2NIks9zSE8b83o5GuwfTCs2NZ/P77788sv/9T448TEfCUYmGJlgZIKRCUZ+HBg5n8/1D//wD//z57Dgg218LP357stoHTFsJBEswyGJQO8LOOA4GDjsChPhnYTl5NVqZXpKlvBZ5stms3Y+v2zLWzjAxLIlwZwAuN1uzVAajYbJJv70pz8pk8noiy++0MuXL7VcLvXw8KDZbGYGuNvt9PDwYBrsTqdj7JCXBwBc+Xzeroc0gaDL8juFojBqYRhap6M4jm2PEkAXbT7LrBgIHWHQSFerVdN1E3wqlYqur68VhqG63a4tb3NPSE2kc4ADoL38BaYUdpExhq3xxamSLJGo1WpWDAuDyXE8Hq0Lz3a71WAw0GazMTkLCQSdpBiLSqWizWaj+/t7W0pHduEZxCB4agvMM0hPbImXUXQ6HZPQ0MkIpguwhNVdLpeaTCYG3pIsqI/HgMI1tgAAIABJREFUY2sZC1udTqetKJ0kxd8vQaBYPO9vARiRvJCAlMtl08d/+umnBs7dblep1LlINwgCq7Ug2CO9wZ+QlBCUGF9kBoAMQI+PptNpVSoVNRoNk+mcTicDW/bxqVarxkah/SeAP+/cxD4vhcJ5PyCu12g0TFve7/dVqVRMh10qlSzZ8XPG+ZCD0J0rDEPztW63a1r0VCplCR0AvN1u1W63ra4Exh1bxH4BGdhoEhbmyhdtw3AydsRPzkXcIPhzT9g1MQFdPIADI++T9SiKbP8hAA+7g831MpLk+OuOBCMTjEwwMsHIBCM/Dox81/FB2sKz7MYAEdRxeiaIQOZ/z81yLlg49l6QnvrfE1gx0Gw2awOHcxQKBet8xFsuINRqtXR9fW2DR4Bg6ZLJh53hfvzSNUaNwfzpT3/SaDSyjjfoY9GioonlrRvWBTDlur6dLWMKc+CDKNpnxg9mhvN0Oh2l02nd39+bvMTLDfhaLBaazWZ2nTAMDUT8vVE8SxEzb/gAk2dbYSZJMAA2HJDPAIAUxXa7XQsSyA9gVAnksH0EPNhKbIcC3Pl8bswjLU7RX9frdWMzl8ulSUgKhYJGo5EymfOGlcy9X4aXnvaWYQxLpZJWq5VteFkqlbTdbk2uwP3BvrDkHQSBXr9+rel0aqxTq9WyBAB7o0MTMoxyuaxaraZcLmfM083NjQGUZ0pZwl8ul2b/tGoOgsCe0xeWw2hKZ1aNa1MwejweDdjxQ/xnMploNptpPB5f1JRgI8xfoVAwVpKkA/ter9d2TpKaOI51dXVlEhp8jODMeMFCUTNB4rVcLjUej5XL5QycSBSQAsRxbEzvYDAwlhcdPDbLvBKcK5WK2VGxWFSz2dRoNDK7ZZx8wwFYaX5HckXsIuHxhb5IS/BzAAJpF0XN7L2DbyKxYW5rtZoBMB3MYPiC4Gm/GcAcVp2VABjF5Hi/I8HIBCMTjEwwMsHIjwMjfSx6fnyQFS7+z0W5MMbob1iSDQZL9xRU8ntYVLSjXnaB1jKVOhfP5vN5czAkCNKTZAOmoFQq2aZwsIUsb3vAo6iQA42mfzOG6Xrx4oWxX+wfwlL74XBQrVYz58CIaCu7XC6tFecnn3xyIePAURkfEh0/nhgsY+W1u+Px2AwROQMBEL06zouEg3mYTCb
GDMGy+mJQWJznS8acc7/fW9cfD/owWovFQuv12rTYs9lMw+HQAoO3H/6PHtwnIbCGsFLowwGb+XxuTLAHOFrTopHmGsfjea8U6YnlhJlhHGGTkfGwzL1erzUajUynPZ/PbZwJ/KfTyTokwZ4ej0criJZkrJ1nvOfzubX59fIYgJL9WJDV0PkLHXM6nbZ9UggeBFYSB4IxrB16aP+MjNV2u9V0OtV4PNbhcFC73Ta2uF6v6/7+Xul0Wu122zoRLZdLY7eQsURRZAwViRfjE8ex6vW64jg2ffbx+LT5K3GGgwQMH+P/dOuiuLler6vZbJq0ZLvdGkgNBgNjBdFs88yAIHNQqVSsEBeby2TOXY/S6bQVQnv/9Pu2YF/EHACOxAj2HjDFJokH+An2GMexBoOB7aPCtSlef97tC0AB5H3MJfkBSABp/J0YnRx//ZFgZIKRCUYmGJlg5MeBke86PsgKF4yAZwIYDByRz+KULJfC6DCgGC/tYGGVeNP1Mgj/b95W/QPPZjPVajVjmGazmX744YeLnxG0uB+AxMsyCJDL5dKW5wGiRqNh2me01RTtUviJw2ezWQ0GAyuKjOPYNg9MpVJWkMnyKQ5EUMaoYTJZOocVzWTOhYSMIbpqmDuAkWX0MAzt7wh2Nzc3Frjp8pTJPHW+gpVAt8y88EUwwuhns5lOp3MxJEYK+Nze3mq9Xuvx8dE218NpoiiyIAlQwXpIsuvR2YlrorvHNuiohV30ej1Vq1V7ds5F0GPn88PhYJ+t1WoG4ow/gTKKIjWbTVUqFZNvSOfgy/iwxL3dbtVsNtVoNC4SJ0APXTRBHOaGZXwC1W6307/9279pt9sZs+UZZoIpoMPzl0oltdvti+eez+d6+fKljsej7fvBnjjIKPBj/LBararZbFrxsZcv9Ho9k3oghUKHT/BleZ/AB8Ax1tgpMQNQQUsO+wfbizymWCxapyOkUQTs1WplyQLtsBeLhV6/fm1xa7vdGotKRyTkLmi8+Xkqde4O1mg0NB6PLbADZj6Z8omXv0+emwJ5/AoJBCD1nH3H5rEfYgbjxfewnPgN40QiDhNIkupBmjEE+Lj2XwKU5PjzI8HIBCMTjEwwMsHIjwMj30VKfpAXLpaOWdrniyVyQIM3RQYJUGGwvA7S61rR4RLsCQw8OOwe+lkGLp1O20DG8Vn/7ltQ+sJJWEK0qSyL8nPe6pFLSNJ4PDbWi24+dDfBEPk/z99sNq070Hw+V7vdVhzHtszNG7zXoMJ0UkjJGztMAn+DxrhUKl0s3aNh5m95Q2eOpHOXn+FwaEGVc/slexwOFpNlZ+/EkqzAFIMmMWAMUqnzBoej0cgYCcAJoGP5lqVm2MtUKqXxeHyhV5ZkbAtOjxNNJhMr8mYJHmZTkn1fKpU0GAw0HA4t8BM0CKTSuV3rfD63OgGvJ4b9bDab+uyzzy7GCykL9QfYK7IbHBv2lo5PjBdF6TzrYrHQV199dcFWxXFshc0weNQwlMtl3d7eWiCB5VwsFhbs0IITcEhGSAZ8AoIPkzwSnIvFoiUMXoedz+dNUoI/eZuHqWKu9vu9yTnwU2wR0IQZJngzxzB8sMTIZThHHJ/bCff7fWWz58J12GSeS5LZBYkdXbKIQ/g20izAEwDg77Fx/k8yiI9jZ3Ec20aqzJ2X8D2XdVEUzPyQjMLOsaKBfXlWnDln/LAfknu/cgBb+JfkEsnx80eCkQlGJhiZYGSCkQlGvvOFKwiC/zsIgt67PiM9abZZ2oTp4O0PcPEyCRgaDPG5vMFLJOI4NsYN7SQSBzooMTgUccK0sLxMMGQJEcPyUhmCGcbJvVO0BzME0BHI2UfCB2uCRTabNYP2S5CLxULz+dxawqKDhmHwB5Max7Hm87lp4NGs+jd4ijdhGAnUODRMFDpwAjBFyZKsEFrSBSDjTO12W/V6/WIjPcAR8KfwkMCA06D1nk6niqLIOuHA3MCkwWowd0g6YBOjKLrYIwL7gd3CUVmiZr8U5AwESqQwMHDYI1ICpBQe7HK5nBqNhsrlsn2OLkrFYlHD4dDY2lQqpeFwaMFJkur1+gWzikwijmNNJhPl83ljdLGpbDardrttxa0UOcPwMu9IYxjLMAzV6/V0Op0sSKFZJvi9efPGfIgvAhnPjz/iE7TyLZfLZnueZSdgwwh6wKEQHqYW6QYyI3w8jmPbB8XbF4XSgBJxAMBjXGD0Yd2QMBEssRM6PrEvjvcrYhAst4uN1loZ3TuJLvFEkhXd+8SVuWAco+hpM1zGDuD0yYIv4GV8JJkv73a7i/oX5g3m9Pm/kREBxNybj+v4lY/ZyfF0JBiZYGSCkQlGJhiZYORfWt2S/sILVxzH/18cxw/vPIOeujB50PCgABuw3+/tDRm2Agc/Ho/WD599HPxbpp+k0+l0AQL+LZQ3W4rlCJzs/cASPgPOxDIhDBgOVywWbZCRTaRSKZv8yWRi3Z0wyO12e1GY6JlMWEXpiTH7/vvvDdhwJJzFs3ncB+fzAJLL5WwPBwI1IA27SIEyhk3g94wHQOmTAApZAR/06jg7RYkEnyAIrKAUh6AYlXMQyAAR3xKV5/ZARuBFz346nazlKVpv6akugU0gCXDMCVKVKIrM+Rlj2EgYDJIf2BiCFfeKU3qZA8GOQtH5fK5SqaTdbqfHx0djVZDSSLJAiF2hlZ/P55LOIMQ+OfhBLpdToVAwtgf2R5Ix0J1OxzoJ/fjjjwZyPCuMItKObPbc8hYGmgJxfNcHe4IksgCkKt1uV9Vq9SLwebuAIQeEuRfa7JJAlMtlux4BFjbOy4MIhF6uQA0C+nTGFc06No68CdYb5heARp7BPcCycW5YWHzBJ7JcD5v0wOAZML9CAOvO2PqElO8ZC8/CcQ5AyNv1ZrMxn4eR4x75LAkDsZN5Ye6JP8nq1p8fCUYmGJlgZIKRCUYmGPnXYOQHkRTiZDwEN8X3GBzB2y93crNMGMumDAZBhIcm2PO3nnFjUPwk8XMCDiwaTEscx8YGAkphGNo9cj/c02g0MmNhAuhyxHPDkAF46IMx0EajYcvk3333nd0butTNZmM7jZfLZTUaDTUaDbsPHJVla9rmLhYLHQ7nTQ8JeDBUBH1JBggE7UajYSDIsjDjRrE284dUIY7PS7sedChqJlDTtQfJAEvzgBhadpwChqrValn7WkCfxAPHrdfrpnuGyUPfzv2jLaYdLhtmsnM4dktRahAEurq6kiRjfAA05m+xWJgchGsAtATcWq2m0WikxWKhOI5t40Xmmb1sPBB4MIXtJDiQHPnNT5GHsMcHshgCpiTriuTb+7KvCmxPOp22+adQmHEEiAHX0+lkUhq/+3qpVLoo6idRRMufyWRMFoEfU0cBGOPPjIEHYu6BuQBUuRbPhh3RycgnsnRAG4/HNoaADUzm6XQyZtEX3PukFjY1nU7bmMGSM05ev059AQEamwOwJNnYHY/Hi31LYM0k2XMR24gXjBljge16xhVQA8z5HHHudDrZPcJQepkEzB4xLTne70gwMsHIBCMTjEww8uPAyHetcqX/w9+8xwHjgdMRSG
FveDjPSvEGjiGdTuduSjBpfhO7KIouWlz6ZUXOyUDgUH55D6YCY1itVlqv1xc7uxOYYRVYKgUk0LTjpHQ1obiTyZtMJhcFx7whc49cB/bG65Ln87mazaa14ywWi2q329Z5JpVK6fHx8cLg1+u17c7+6aef6nA4mDZdko0lhoFzEJiZH4Aljs+yBlrHUjAKYOOgg8HAAI7lfIyU1pxonXFadNIAEYWkMKQwlDCyfnz4HWwl9kBgORwOJteBYWXu2M9jOp0akBEED4eDtXGF2UGTTIcoEhYYPJyLYEwggX3JZrO6ublRoXDe1+L169dmkyQf6NDRGBNYqAlgaZ+fE3wJzoABAYz52+12xoTBVMP0cX1snkALcw1DRHchwJTlfB+kkAkx95z7ue8gnWFjQ1qy4gckWLDhyCfoYFWr1Yw1wm58bQGrBMhpsBEvd+A6PimiUxS+7SVNsKveV7LZ88aNMH2r1Uq53HnvluFwaIXrPviT+B0OB7sfxokEhOfCtwCRzWZjoE7y6hNYQJC2wcRXNPvIlbgm+7Qwn7DjPgF/vkLgE3NsnntOjvc7EoxMMDLByAQjE4z89WPku1a5PtjGxxj28x28CQoMXj7/tO+HlygwsAwQBo+2FebDDwJvzZJs+ZPBg93yS5Qs0QNagBqB3TuKf/MncGEY9/f3enx8NGOklz8TMBwOVSgUTO/Mc3Ev6KrZSZ0uNcViUdPp1DZ+hPUiABI4Adh0Om07wqdSKdNhPz4+qlqtajKZaDgcGmOIZjmTyRhLAwNVLBat8JHzU9jJRpVsnAlrx2d4LsYjm80a+OEIzActZPkZ80JQgHkjwDwvrOZeCXYEv/l8bkwrDotTAz75fN4Kk0lksCmfqPi9IB4fH40tpLMWBeMEaB/EGIdCoWC7yO/3e9VqNVuyxsYBL57LL4uTmOFLvhPSeDyWJHsmzo12frVaab/fq9FoGBvD/iT4CfY5nU61Xq81mUwURZHJKZCXUPxNkG2329put8bMMlc8JzUIsFMwXfgAMgx+zv/DMLT2ufweP43js2SnUCiY7/hz4pfMO4ksNgariGwgnU5rtVpdbLaJvImkh5UGwIFicubveDxaa2nikS/kZX8f2j+T5PA8MGeeua5WqybrIknmHhgn4hVMHufGfj0r7VcdvOwFX/TMO7Hcr5IwFowz50+O9zsSjEwwMsHIBCMTjPw4MPJdxwfp8evlDyz7Mshob2GsuGnP0HiDk2TaTgaChyQIMIAYEA7H0h8sAAMFoyLJgjF/T2Djs0gEcEwMlWcKw1CNRsMKPuncwrP3+30rsM3n8xdjwbUJ5MgaaBVK29Q4Pu+dsVwudXd3p+VyqVqtZhPrl2nr9bpms5na7ba9uXc6HWNbqtWqer2erq6u1Gq1VCwWjZ0AzDFCmIMoioxFAlCRD7C0ig48iiKVSiXbxdwv66Jfhtktl8tarVY2bul02kATsObzLNUXi0ULwtgYDApL0WysuN/vNZlMLhISmKUwDK3gE50zzCLLzgRzGGWCB8EaBgNb8gwHthwE5w0T+/2+MW+/+93vVKvV1O12dTgcNJ1OLQDwXOzDgYQA+4dx6ff7SqWedjYfj8d2H8+LPWnDDLuE9KhUKl3se8O8eIZVOjPSsE88m2dRkY7AsBOUsR/06vgs+2rgq1yHoAX7TyKBXXL+m5sbpVIp20ke2wS0ua6XogAGPsBio8w3NsIzkEyQ2JxO571JYNe5n/F4bMmzByG+kF7hj5wPf/HzyvwT/JEVYX8wzd72sFNiFp/x7CAMKXOIHz9f1eCLmEmC7Ve0AJJETvifPxKMTDAywcgEIxOM/Lgx8oPUcGEMvrMLA8QyKDdHMMJgGHRJF5pKr6EkmMEG+TdL3r7ZEM4bEcuo0hNAecYP3SYFrwRCzpHL5awoE4YC/SnLruwQH0WRHh4eVK1WDQiOx6fOSHEc2+8wdpYzYcVwUFrp1mo1bbdb3d3dab8/b+hHpx+WfiWZA7PnAUzTy5cvbed4H6BzuZwVZ6PfHo/HFjCm06n9niViD8Ze71+tVtXtdrXdnjf7Y24rlYoBBSwOTsX+LIB8JpOxvT2YT9gP5oGxAiCZv/1+r8FgcME6sIEh94CjMgZs+NjpdIxN3O/3WiwWlgQQHKvVqslNfFE38xWGoer1uv0N0odaraZer6f1eq1Wq2WMFMwKOn4c+XQ6GZuIFIbkw18/DM+7oZNcEADY5JK/bzQaKpVKajab6vf7xhRVq1U7F22aYX5fvHhhTCqbKgLMsPLYng++0llCJMlqLNgsMZ0+1wgUCgW1Wq0LW4Kd58AGsJkwDI2JZCPFVqt1EawJyvgAAd8nqTDz0jmo0pWNFQBiEgDiE1aY/clkYgEeu2y1WiZ74VrIX3zXJYJ6Op02ORgSLb4AfIAYHwNwYBT5PPGVJI84DMB44OD8PD8AyeHtkL8nxvJvmLtkhev9jwQjE4xMMDLByAQjE4z8xTVcXorgNaYwb2jG/SDwpslys2dtcDQ6nCyXS9udHIbPM4MYMIbvtbIEfSQQp9PJghJBCwAjUBGkcRze8ikmjePYHDwMzzuP42zD4VDdblf1el3L5dKciCJPlvWRIVSrVTOSIDi30Gw2m3YvGGwURQZ4SCgKhYLpx6+urjQcDu2tnyVkNps8HA4WJAFQv2Eif8N8cR7flnWxWKherxsr6KUMgA5slyQbv+vrawsYq9XKNgM8Hs/drSgShmXYbDamdWcecSp0+uh00WHTVpU9LnAgAj8SERIJlrr7/b4VUfOFpIb9Q2A3YWiYGxybMeFeWIqP49j2k3l8fFQ+nzdmjufmeqfTyYIQzB1jBDtFUAeEqC+YTqfmaxyeaWPzSZI9L9fAH2EN8S8/5gQQ/DKVSpmun45pSCs8K4gGv1wuXySIsK9IQthvh4TTd01iDNLptDGqHmiw5efSAthWAO1wOGg0Gmmz2Wg8Hl8wigArCQWSER9AiQflclnT6fSioDwIAv3444+SZAkFtsxYc27GmJ8zT77QFyDi2QA24tzpdDIG0ssZYDAZDxhWz8pyPs/UARiSLuYB9hhtPayjX2VJjr/uSDAywcgEIxOMTDDy48DIdx2/GD1x2MPhYPtv4ExIJWD1WE7c7/fG+BHIMHDe2LPZrBXdbrfn3a398p43UCQHAJqXYzC4GCCT4XWdvP3DGhHoB4OBJpOJ2u22MUhxHKvdbtteEOx70Gq1lMmcN41EQyqd95OAlYFF4G28Xq9b4ehud94RHRDCKJFgAGKffPKJisWirq6udH9/r3a7bfsUeK1uGIbmyBRBExRub2+VzWbV7/eNDaRN6nQ6VT6ft2JpnDqOY11dXdncMfckAux+jjHiMMgq+DyAxblhMn1yAGhjMzgTrFY6fe4a9Pr1awN15oZuQCQkyBPYbI8ldgIGwZ+aCcCUILDdbvX999/bNWDngiCwugTkJ4DOeDw2ycp8PtfDw4M+/fRTY4cptJVk7JIkNZtNW4oncWDsYZGoeQC4W62WdfhivH1gJyADwNQgkNC8ePFCp9PJGFuuS9BG4w3DBVPJP
[remainder of base64-encoded PNG image output elided]\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {
+ "tags": [],
+ "needs_background": "light"
+ }
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "lMombPr0GF9a",
+ "colab_type": "text"
+ },
+ "source": [
+        "The images used in this demo are from the [Snapshot Serengeti dataset](http://lila.science/datasets/snapshot-serengeti) and are released under the [Community Data License Agreement (permissive variant)](https://cdla.io/permissive-1-0/)."
+ ]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/research/object_detection/object_detection_tutorial.ipynb b/research/object_detection/colab_tutorials/object_detection_tutorial.ipynb
similarity index 98%
rename from research/object_detection/object_detection_tutorial.ipynb
rename to research/object_detection/colab_tutorials/object_detection_tutorial.ipynb
index c83b67ede32938c40596e2cc2fced0ab1ae952bb..9063f2cd33aa8fffe160b138b3a3ec69c0d3abdb 100644
--- a/research/object_detection/object_detection_tutorial.ipynb
+++ b/research/object_detection/colab_tutorials/object_detection_tutorial.ipynb
@@ -10,11 +10,11 @@
"# Object Detection API Demo\n",
"\n",
"\u003ctable align=\"left\"\u003e\u003ctd\u003e\n",
- " \u003ca target=\"_blank\" href=\"https://colab.sandbox.google.com/github/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb\"\u003e\n",
+      "    \u003ca target=\"_blank\" href=\"https://colab.sandbox.google.com/github/tensorflow/models/blob/master/research/object_detection/colab_tutorials/object_detection_tutorial.ipynb\"\u003e\n",
" \u003cimg src=\"https://www.tensorflow.org/images/colab_logo_32px.png\" /\u003eRun in Google Colab\n",
" \u003c/a\u003e\n",
"\u003c/td\u003e\u003ctd\u003e\n",
- " \u003ca target=\"_blank\" href=\"https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb\"\u003e\n",
+      "    \u003ca target=\"_blank\" href=\"https://github.com/tensorflow/models/blob/master/research/object_detection/colab_tutorials/object_detection_tutorial.ipynb\"\u003e\n",
" \u003cimg width=32px src=\"https://www.tensorflow.org/images/GitHub-Mark-32px.png\" /\u003eView source on GitHub\u003c/a\u003e\n",
"\u003c/td\u003e\u003c/table\u003e"
]
diff --git a/research/object_detection/core/batch_multiclass_nms_test.py b/research/object_detection/core/batch_multiclass_nms_test.py
index d99116a4bf086107e08fec70c26089d6fa9c2cbf..06f17103b2b6bd7df5d449a270f0bddfd3514249 100644
--- a/research/object_detection/core/batch_multiclass_nms_test.py
+++ b/research/object_detection/core/batch_multiclass_nms_test.py
@@ -27,21 +27,20 @@ from object_detection.utils import test_case
class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase,
parameterized.TestCase):
- @parameterized.named_parameters(('', False), ('_use_static_shapes', True))
- def test_batch_multiclass_nms_with_batch_size_1(self, use_static_shapes):
- boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]],
- [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
- [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
- [[0, 10, 1, 11], [0, 10, 1, 11]],
- [[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
- [[0, 100, 1, 101], [0, 100, 1, 101]],
- [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
- [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
- tf.float32)
- scores = tf.constant([[[.9, 0.01], [.75, 0.05],
- [.6, 0.01], [.95, 0],
- [.5, 0.01], [.3, 0.01],
- [.01, .85], [.01, .5]]])
+ def test_batch_multiclass_nms_with_batch_size_1(self):
+ boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]],
+ [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
+ [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
+ [[0, 10, 1, 11], [0, 10, 1, 11]],
+ [[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
+ [[0, 100, 1, 101], [0, 100, 1, 101]],
+ [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
+ [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
+ np.float32)
+ scores = np.array([[[.9, 0.01], [.75, 0.05],
+ [.6, 0.01], [.95, 0],
+ [.5, 0.01], [.3, 0.01],
+ [.01, .85], [.01, .5]]], np.float32)
score_thresh = 0.1
iou_thresh = .5
max_output_size = 4
@@ -52,56 +51,51 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase,
[0, 100, 1, 101]]]
exp_nms_scores = [[.95, .9, .85, .3]]
exp_nms_classes = [[0, 0, 1, 0]]
-
- (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
- nmsed_additional_fields,
- num_detections) = post_processing.batch_multiclass_non_max_suppression(
- boxes,
- scores,
- score_thresh,
- iou_thresh,
- max_size_per_class=max_output_size,
- max_total_size=max_output_size,
- use_static_shapes=use_static_shapes)
-
- self.assertIsNone(nmsed_masks)
- self.assertIsNone(nmsed_additional_fields)
-
- with self.test_session() as sess:
- (nmsed_boxes, nmsed_scores, nmsed_classes,
- num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes,
- num_detections])
- self.assertAllClose(nmsed_boxes, exp_nms_corners)
- self.assertAllClose(nmsed_scores, exp_nms_scores)
- self.assertAllClose(nmsed_classes, exp_nms_classes)
- self.assertEqual(num_detections, [4])
+ def graph_fn(boxes, scores):
+ (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
+ nmsed_additional_fields, num_detections
+ ) = post_processing.batch_multiclass_non_max_suppression(
+ boxes, scores, score_thresh, iou_thresh,
+ max_size_per_class=max_output_size,
+ max_total_size=max_output_size)
+ self.assertIsNone(nmsed_masks)
+ self.assertIsNone(nmsed_additional_fields)
+ return (nmsed_boxes, nmsed_scores, nmsed_classes, num_detections)
+
+ (nmsed_boxes, nmsed_scores, nmsed_classes,
+ num_detections) = self.execute_cpu(graph_fn, [boxes, scores])
+ self.assertAllClose(nmsed_boxes, exp_nms_corners)
+ self.assertAllClose(nmsed_scores, exp_nms_scores)
+ self.assertAllClose(nmsed_classes, exp_nms_classes)
+ self.assertEqual(num_detections, [4])
def test_batch_iou_with_negative_data(self):
- boxes = tf.constant([[[0, -0.01, 0.1, 1.1], [0, 0.2, 0.2, 5.0],
- [0, -0.01, 0.1, 1.], [-1, -1, -1, -1]]], tf.float32)
- iou = post_processing.batch_iou(boxes, boxes)
+ def graph_fn():
+ boxes = tf.constant([[[0, -0.01, 0.1, 1.1], [0, 0.2, 0.2, 5.0],
+ [0, -0.01, 0.1, 1.], [-1, -1, -1, -1]]], tf.float32)
+ iou = post_processing.batch_iou(boxes, boxes)
+ return iou
+ iou = self.execute_cpu(graph_fn, [])
expected_iou = [[[0.99999994, 0.0917431, 0.9099099, -1.],
[0.0917431, 1., 0.08154944, -1.],
[0.9099099, 0.08154944, 1., -1.], [-1., -1., -1., -1.]]]
- with self.test_session() as sess:
- iou = sess.run(iou)
- self.assertAllClose(iou, expected_iou)
+ self.assertAllClose(iou, expected_iou)
@parameterized.parameters(False, True)
def test_batch_multiclass_nms_with_batch_size_2(self, use_dynamic_map_fn):
- boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]],
- [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
- [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
- [[0, 10, 1, 11], [0, 10, 1, 11]]],
- [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
- [[0, 100, 1, 101], [0, 100, 1, 101]],
- [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
- [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
- tf.float32)
- scores = tf.constant([[[.9, 0.01], [.75, 0.05],
- [.6, 0.01], [.95, 0]],
- [[.5, 0.01], [.3, 0.01],
- [.01, .85], [.01, .5]]])
+ boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]],
+ [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
+ [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
+ [[0, 10, 1, 11], [0, 10, 1, 11]]],
+ [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
+ [[0, 100, 1, 101], [0, 100, 1, 101]],
+ [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
+ [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
+ np.float32)
+ scores = np.array([[[.9, 0.01], [.75, 0.05],
+ [.6, 0.01], [.95, 0]],
+ [[.5, 0.01], [.3, 0.01],
+ [.01, .85], [.01, .5]]], np.float32)
score_thresh = 0.1
iou_thresh = .5
max_output_size = 4
@@ -118,49 +112,48 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase,
[.85, .5, .3, 0]])
exp_nms_classes = np.array([[0, 0, 0, 0],
[1, 0, 0, 0]])
-
- (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
- nmsed_additional_fields, num_detections
- ) = post_processing.batch_multiclass_non_max_suppression(
- boxes, scores, score_thresh, iou_thresh,
- max_size_per_class=max_output_size, max_total_size=max_output_size,
- use_dynamic_map_fn=use_dynamic_map_fn)
-
- self.assertIsNone(nmsed_masks)
- self.assertIsNone(nmsed_additional_fields)
- # Check static shapes
- self.assertAllEqual(nmsed_boxes.shape.as_list(),
- exp_nms_corners.shape)
- self.assertAllEqual(nmsed_scores.shape.as_list(),
- exp_nms_scores.shape)
- self.assertAllEqual(nmsed_classes.shape.as_list(),
- exp_nms_classes.shape)
- self.assertEqual(num_detections.shape.as_list(), [2])
-
- with self.test_session() as sess:
- (nmsed_boxes, nmsed_scores, nmsed_classes,
- num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes,
- num_detections])
- self.assertAllClose(nmsed_boxes, exp_nms_corners)
- self.assertAllClose(nmsed_scores, exp_nms_scores)
- self.assertAllClose(nmsed_classes, exp_nms_classes)
- self.assertAllClose(num_detections, [2, 3])
+ def graph_fn(boxes, scores):
+ (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
+ nmsed_additional_fields, num_detections
+ ) = post_processing.batch_multiclass_non_max_suppression(
+ boxes, scores, score_thresh, iou_thresh,
+ max_size_per_class=max_output_size,
+ max_total_size=max_output_size,
+ use_dynamic_map_fn=use_dynamic_map_fn)
+ self.assertIsNone(nmsed_masks)
+ self.assertIsNone(nmsed_additional_fields)
+ # Check static shapes
+ self.assertAllEqual(nmsed_boxes.shape.as_list(),
+ exp_nms_corners.shape)
+ self.assertAllEqual(nmsed_scores.shape.as_list(),
+ exp_nms_scores.shape)
+ self.assertAllEqual(nmsed_classes.shape.as_list(),
+ exp_nms_classes.shape)
+ self.assertEqual(num_detections.shape.as_list(), [2])
+ return (nmsed_boxes, nmsed_scores, nmsed_classes, num_detections)
+
+ (nmsed_boxes, nmsed_scores, nmsed_classes,
+ num_detections) = self.execute_cpu(graph_fn, [boxes, scores])
+ self.assertAllClose(nmsed_boxes, exp_nms_corners)
+ self.assertAllClose(nmsed_scores, exp_nms_scores)
+ self.assertAllClose(nmsed_classes, exp_nms_classes)
+ self.assertAllClose(num_detections, [2, 3])
def test_batch_multiclass_nms_with_per_batch_clip_window(self):
- boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]],
- [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
- [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
- [[0, 10, 1, 11], [0, 10, 1, 11]]],
- [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
- [[0, 100, 1, 101], [0, 100, 1, 101]],
- [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
- [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
- tf.float32)
- scores = tf.constant([[[.9, 0.01], [.75, 0.05],
- [.6, 0.01], [.95, 0]],
- [[.5, 0.01], [.3, 0.01],
- [.01, .85], [.01, .5]]])
- clip_window = tf.constant([0., 0., 200., 200.])
+ boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]],
+ [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
+ [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
+ [[0, 10, 1, 11], [0, 10, 1, 11]]],
+ [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
+ [[0, 100, 1, 101], [0, 100, 1, 101]],
+ [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
+ [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
+ np.float32)
+ scores = np.array([[[.9, 0.01], [.75, 0.05],
+ [.6, 0.01], [.95, 0]],
+ [[.5, 0.01], [.3, 0.01],
+ [.01, .85], [.01, .5]]], np.float32)
+ clip_window = np.array([0., 0., 200., 200.], np.float32)
score_thresh = 0.1
iou_thresh = .5
max_output_size = 4
@@ -177,50 +170,48 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase,
[.5, .3, 0, 0]])
exp_nms_classes = np.array([[0, 0, 0, 0],
[0, 0, 0, 0]])
-
- (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
- nmsed_additional_fields, num_detections
- ) = post_processing.batch_multiclass_non_max_suppression(
- boxes, scores, score_thresh, iou_thresh,
- max_size_per_class=max_output_size, max_total_size=max_output_size,
- clip_window=clip_window)
-
- self.assertIsNone(nmsed_masks)
- self.assertIsNone(nmsed_additional_fields)
- # Check static shapes
- self.assertAllEqual(nmsed_boxes.shape.as_list(),
- exp_nms_corners.shape)
- self.assertAllEqual(nmsed_scores.shape.as_list(),
- exp_nms_scores.shape)
- self.assertAllEqual(nmsed_classes.shape.as_list(),
- exp_nms_classes.shape)
- self.assertEqual(num_detections.shape.as_list(), [2])
-
- with self.test_session() as sess:
- (nmsed_boxes, nmsed_scores, nmsed_classes,
- num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes,
- num_detections])
- self.assertAllClose(nmsed_boxes, exp_nms_corners)
- self.assertAllClose(nmsed_scores, exp_nms_scores)
- self.assertAllClose(nmsed_classes, exp_nms_classes)
- self.assertAllClose(num_detections, [2, 2])
+ def graph_fn(boxes, scores, clip_window):
+ (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
+ nmsed_additional_fields, num_detections
+ ) = post_processing.batch_multiclass_non_max_suppression(
+ boxes, scores, score_thresh, iou_thresh,
+ max_size_per_class=max_output_size, max_total_size=max_output_size,
+ clip_window=clip_window)
+ self.assertIsNone(nmsed_masks)
+ self.assertIsNone(nmsed_additional_fields)
+ # Check static shapes
+ self.assertAllEqual(nmsed_boxes.shape.as_list(),
+ exp_nms_corners.shape)
+ self.assertAllEqual(nmsed_scores.shape.as_list(),
+ exp_nms_scores.shape)
+ self.assertAllEqual(nmsed_classes.shape.as_list(),
+ exp_nms_classes.shape)
+ self.assertEqual(num_detections.shape.as_list(), [2])
+ return nmsed_boxes, nmsed_scores, nmsed_classes, num_detections
+
+ (nmsed_boxes, nmsed_scores, nmsed_classes,
+ num_detections) = self.execute_cpu(graph_fn, [boxes, scores, clip_window])
+ self.assertAllClose(nmsed_boxes, exp_nms_corners)
+ self.assertAllClose(nmsed_scores, exp_nms_scores)
+ self.assertAllClose(nmsed_classes, exp_nms_classes)
+ self.assertAllClose(num_detections, [2, 2])
def test_batch_multiclass_nms_with_per_image_clip_window(self):
- boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]],
- [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
- [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
- [[0, 10, 1, 11], [0, 10, 1, 11]]],
- [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
- [[0, 100, 1, 101], [0, 100, 1, 101]],
- [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
- [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
- tf.float32)
- scores = tf.constant([[[.9, 0.01], [.75, 0.05],
- [.6, 0.01], [.95, 0]],
- [[.5, 0.01], [.3, 0.01],
- [.01, .85], [.01, .5]]])
- clip_window = tf.constant([[0., 0., 5., 5.],
- [0., 0., 200., 200.]])
+ boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]],
+ [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
+ [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
+ [[0, 10, 1, 11], [0, 10, 1, 11]]],
+ [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
+ [[0, 100, 1, 101], [0, 100, 1, 101]],
+ [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
+ [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
+ np.float32)
+ scores = np.array([[[.9, 0.01], [.75, 0.05],
+ [.6, 0.01], [.95, 0]],
+ [[.5, 0.01], [.3, 0.01],
+ [.01, .85], [.01, .5]]], np.float32)
+ clip_window = np.array([[0., 0., 5., 5.],
+ [0., 0., 200., 200.]], np.float32)
score_thresh = 0.1
iou_thresh = .5
max_output_size = 4
@@ -238,56 +229,55 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase,
exp_nms_classes = np.array([[0, 0, 0, 0],
[0, 0, 0, 0]])
- (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
- nmsed_additional_fields, num_detections
- ) = post_processing.batch_multiclass_non_max_suppression(
- boxes, scores, score_thresh, iou_thresh,
- max_size_per_class=max_output_size, max_total_size=max_output_size,
- clip_window=clip_window)
-
- self.assertIsNone(nmsed_masks)
- self.assertIsNone(nmsed_additional_fields)
- # Check static shapes
- self.assertAllEqual(nmsed_boxes.shape.as_list(),
- exp_nms_corners.shape)
- self.assertAllEqual(nmsed_scores.shape.as_list(),
- exp_nms_scores.shape)
- self.assertAllEqual(nmsed_classes.shape.as_list(),
- exp_nms_classes.shape)
- self.assertEqual(num_detections.shape.as_list(), [2])
-
- with self.test_session() as sess:
- (nmsed_boxes, nmsed_scores, nmsed_classes,
- num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes,
- num_detections])
- self.assertAllClose(nmsed_boxes, exp_nms_corners)
- self.assertAllClose(nmsed_scores, exp_nms_scores)
- self.assertAllClose(nmsed_classes, exp_nms_classes)
- self.assertAllClose(num_detections, [1, 2])
+ def graph_fn(boxes, scores, clip_window):
+ (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
+ nmsed_additional_fields, num_detections
+ ) = post_processing.batch_multiclass_non_max_suppression(
+ boxes, scores, score_thresh, iou_thresh,
+ max_size_per_class=max_output_size, max_total_size=max_output_size,
+ clip_window=clip_window)
+ self.assertIsNone(nmsed_masks)
+ self.assertIsNone(nmsed_additional_fields)
+ # Check static shapes
+ self.assertAllEqual(nmsed_boxes.shape.as_list(),
+ exp_nms_corners.shape)
+ self.assertAllEqual(nmsed_scores.shape.as_list(),
+ exp_nms_scores.shape)
+ self.assertAllEqual(nmsed_classes.shape.as_list(),
+ exp_nms_classes.shape)
+ self.assertEqual(num_detections.shape.as_list(), [2])
+ return nmsed_boxes, nmsed_scores, nmsed_classes, num_detections
+
+ (nmsed_boxes, nmsed_scores, nmsed_classes,
+ num_detections) = self.execute_cpu(graph_fn, [boxes, scores, clip_window])
+ self.assertAllClose(nmsed_boxes, exp_nms_corners)
+ self.assertAllClose(nmsed_scores, exp_nms_scores)
+ self.assertAllClose(nmsed_classes, exp_nms_classes)
+ self.assertAllClose(num_detections, [1, 2])
def test_batch_multiclass_nms_with_masks(self):
- boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]],
- [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
- [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
- [[0, 10, 1, 11], [0, 10, 1, 11]]],
- [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
- [[0, 100, 1, 101], [0, 100, 1, 101]],
- [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
- [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
- tf.float32)
- scores = tf.constant([[[.9, 0.01], [.75, 0.05],
- [.6, 0.01], [.95, 0]],
- [[.5, 0.01], [.3, 0.01],
- [.01, .85], [.01, .5]]])
- masks = tf.constant([[[[[0, 1], [2, 3]], [[1, 2], [3, 4]]],
- [[[2, 3], [4, 5]], [[3, 4], [5, 6]]],
- [[[4, 5], [6, 7]], [[5, 6], [7, 8]]],
- [[[6, 7], [8, 9]], [[7, 8], [9, 10]]]],
- [[[[8, 9], [10, 11]], [[9, 10], [11, 12]]],
- [[[10, 11], [12, 13]], [[11, 12], [13, 14]]],
- [[[12, 13], [14, 15]], [[13, 14], [15, 16]]],
- [[[14, 15], [16, 17]], [[15, 16], [17, 18]]]]],
- tf.float32)
+ boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]],
+ [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
+ [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
+ [[0, 10, 1, 11], [0, 10, 1, 11]]],
+ [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
+ [[0, 100, 1, 101], [0, 100, 1, 101]],
+ [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
+ [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
+ np.float32)
+ scores = np.array([[[.9, 0.01], [.75, 0.05],
+ [.6, 0.01], [.95, 0]],
+ [[.5, 0.01], [.3, 0.01],
+ [.01, .85], [.01, .5]]], np.float32)
+ masks = np.array([[[[[0, 1], [2, 3]], [[1, 2], [3, 4]]],
+ [[[2, 3], [4, 5]], [[3, 4], [5, 6]]],
+ [[[4, 5], [6, 7]], [[5, 6], [7, 8]]],
+ [[[6, 7], [8, 9]], [[7, 8], [9, 10]]]],
+ [[[[8, 9], [10, 11]], [[9, 10], [11, 12]]],
+ [[[10, 11], [12, 13]], [[11, 12], [13, 14]]],
+ [[[12, 13], [14, 15]], [[13, 14], [15, 16]]],
+ [[[14, 15], [16, 17]], [[15, 16], [17, 18]]]]],
+ np.float32)
score_thresh = 0.1
iou_thresh = .5
max_output_size = 4
@@ -313,61 +303,58 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase,
[[10, 11], [12, 13]],
[[0, 0], [0, 0]]]])
- (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
- nmsed_additional_fields, num_detections
- ) = post_processing.batch_multiclass_non_max_suppression(
- boxes, scores, score_thresh, iou_thresh,
- max_size_per_class=max_output_size, max_total_size=max_output_size,
- masks=masks)
-
- self.assertIsNone(nmsed_additional_fields)
- # Check static shapes
- self.assertAllEqual(nmsed_boxes.shape.as_list(), exp_nms_corners.shape)
- self.assertAllEqual(nmsed_scores.shape.as_list(), exp_nms_scores.shape)
- self.assertAllEqual(nmsed_classes.shape.as_list(), exp_nms_classes.shape)
- self.assertAllEqual(nmsed_masks.shape.as_list(), exp_nms_masks.shape)
- self.assertEqual(num_detections.shape.as_list(), [2])
-
- with self.test_session() as sess:
+ def graph_fn(boxes, scores, masks):
(nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
- num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes,
- nmsed_masks, num_detections])
+ nmsed_additional_fields, num_detections
+ ) = post_processing.batch_multiclass_non_max_suppression(
+ boxes, scores, score_thresh, iou_thresh,
+ max_size_per_class=max_output_size, max_total_size=max_output_size,
+ masks=masks)
+ self.assertIsNone(nmsed_additional_fields)
+ # Check static shapes
+ self.assertAllEqual(nmsed_boxes.shape.as_list(), exp_nms_corners.shape)
+ self.assertAllEqual(nmsed_scores.shape.as_list(), exp_nms_scores.shape)
+ self.assertAllEqual(nmsed_classes.shape.as_list(), exp_nms_classes.shape)
+ self.assertAllEqual(nmsed_masks.shape.as_list(), exp_nms_masks.shape)
+ self.assertEqual(num_detections.shape.as_list(), [2])
+ return (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
+ num_detections)
- self.assertAllClose(nmsed_boxes, exp_nms_corners)
- self.assertAllClose(nmsed_scores, exp_nms_scores)
- self.assertAllClose(nmsed_classes, exp_nms_classes)
- self.assertAllClose(num_detections, [2, 3])
- self.assertAllClose(nmsed_masks, exp_nms_masks)
+ (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
+ num_detections) = self.execute_cpu(graph_fn, [boxes, scores, masks])
+ self.assertAllClose(nmsed_boxes, exp_nms_corners)
+ self.assertAllClose(nmsed_scores, exp_nms_scores)
+ self.assertAllClose(nmsed_classes, exp_nms_classes)
+ self.assertAllClose(num_detections, [2, 3])
+ self.assertAllClose(nmsed_masks, exp_nms_masks)
def test_batch_multiclass_nms_with_additional_fields(self):
- boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]],
- [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
- [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
- [[0, 10, 1, 11], [0, 10, 1, 11]]],
- [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
- [[0, 100, 1, 101], [0, 100, 1, 101]],
- [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
- [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
- tf.float32)
- scores = tf.constant([[[.9, 0.01], [.75, 0.05],
- [.6, 0.01], [.95, 0]],
- [[.5, 0.01], [.3, 0.01],
- [.01, .85], [.01, .5]]])
- additional_fields = {
- 'keypoints': tf.constant(
- [[[[6, 7], [8, 9]],
- [[0, 1], [2, 3]],
- [[0, 0], [0, 0]],
- [[0, 0], [0, 0]]],
- [[[13, 14], [15, 16]],
- [[8, 9], [10, 11]],
- [[10, 11], [12, 13]],
- [[0, 0], [0, 0]]]],
- tf.float32)
- }
- additional_fields['size'] = tf.constant(
+ boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]],
+ [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
+ [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
+ [[0, 10, 1, 11], [0, 10, 1, 11]]],
+ [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
+ [[0, 100, 1, 101], [0, 100, 1, 101]],
+ [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
+ [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
+ np.float32)
+ scores = np.array([[[.9, 0.01], [.75, 0.05],
+ [.6, 0.01], [.95, 0]],
+ [[.5, 0.01], [.3, 0.01],
+ [.01, .85], [.01, .5]]], np.float32)
+ keypoints = np.array(
+ [[[[6, 7], [8, 9]],
+ [[0, 1], [2, 3]],
+ [[0, 0], [0, 0]],
+ [[0, 0], [0, 0]]],
+ [[[13, 14], [15, 16]],
+ [[8, 9], [10, 11]],
+ [[10, 11], [12, 13]],
+ [[0, 0], [0, 0]]]],
+ np.float32)
+ size = np.array(
[[[[6], [8]], [[0], [2]], [[0], [0]], [[0], [0]]],
- [[[13], [15]], [[8], [10]], [[10], [12]], [[0], [0]]]], tf.float32)
+ [[[13], [15]], [[8], [10]], [[10], [12]], [[0], [0]]]], np.float32)
score_thresh = 0.1
iou_thresh = .5
max_output_size = 4
@@ -399,43 +386,43 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase,
[[[10], [12]], [[13], [15]],
[[8], [10]], [[0], [0]]]])
- (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
- nmsed_additional_fields, num_detections
- ) = post_processing.batch_multiclass_non_max_suppression(
- boxes, scores, score_thresh, iou_thresh,
- max_size_per_class=max_output_size, max_total_size=max_output_size,
- additional_fields=additional_fields)
-
- self.assertIsNone(nmsed_masks)
- # Check static shapes
- self.assertAllEqual(nmsed_boxes.shape.as_list(), exp_nms_corners.shape)
- self.assertAllEqual(nmsed_scores.shape.as_list(), exp_nms_scores.shape)
- self.assertAllEqual(nmsed_classes.shape.as_list(), exp_nms_classes.shape)
- self.assertEqual(len(nmsed_additional_fields),
- len(exp_nms_additional_fields))
- for key in exp_nms_additional_fields:
- self.assertAllEqual(nmsed_additional_fields[key].shape.as_list(),
- exp_nms_additional_fields[key].shape)
- self.assertEqual(num_detections.shape.as_list(), [2])
-
- with self.test_session() as sess:
- (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_additional_fields,
- num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes,
- nmsed_additional_fields, num_detections])
-
- self.assertAllClose(nmsed_boxes, exp_nms_corners)
- self.assertAllClose(nmsed_scores, exp_nms_scores)
- self.assertAllClose(nmsed_classes, exp_nms_classes)
+ def graph_fn(boxes, scores, keypoints, size):
+ additional_fields = {'keypoints': keypoints, 'size': size}
+ (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
+ nmsed_additional_fields, num_detections
+ ) = post_processing.batch_multiclass_non_max_suppression(
+ boxes, scores, score_thresh, iou_thresh,
+ max_size_per_class=max_output_size, max_total_size=max_output_size,
+ additional_fields=additional_fields)
+ self.assertIsNone(nmsed_masks)
+ # Check static shapes
+ self.assertAllEqual(nmsed_boxes.shape.as_list(), exp_nms_corners.shape)
+ self.assertAllEqual(nmsed_scores.shape.as_list(), exp_nms_scores.shape)
+ self.assertAllEqual(nmsed_classes.shape.as_list(), exp_nms_classes.shape)
+ self.assertEqual(len(nmsed_additional_fields),
+ len(exp_nms_additional_fields))
for key in exp_nms_additional_fields:
- self.assertAllClose(nmsed_additional_fields[key],
- exp_nms_additional_fields[key])
- self.assertAllClose(num_detections, [2, 3])
-
- def test_batch_multiclass_nms_with_dynamic_batch_size(self):
- boxes_placeholder = tf.placeholder(tf.float32, shape=(None, None, 2, 4))
- scores_placeholder = tf.placeholder(tf.float32, shape=(None, None, 2))
- masks_placeholder = tf.placeholder(tf.float32, shape=(None, None, 2, 2, 2))
+ self.assertAllEqual(nmsed_additional_fields[key].shape.as_list(),
+ exp_nms_additional_fields[key].shape)
+ self.assertEqual(num_detections.shape.as_list(), [2])
+ return (nmsed_boxes, nmsed_scores, nmsed_classes,
+ nmsed_additional_fields['keypoints'],
+ nmsed_additional_fields['size'],
+ num_detections)
+ (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_keypoints, nmsed_size,
+ num_detections) = self.execute_cpu(graph_fn, [boxes, scores, keypoints,
+ size])
+
+ self.assertAllClose(nmsed_boxes, exp_nms_corners)
+ self.assertAllClose(nmsed_scores, exp_nms_scores)
+ self.assertAllClose(nmsed_classes, exp_nms_classes)
+ self.assertAllClose(nmsed_keypoints,
+ exp_nms_additional_fields['keypoints'])
+ self.assertAllClose(nmsed_size,
+ exp_nms_additional_fields['size'])
+ self.assertAllClose(num_detections, [2, 3])
+ def test_batch_multiclass_nms_with_masks_and_num_valid_boxes(self):
boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]],
[[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
[[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
@@ -443,11 +430,12 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase,
[[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
[[0, 100, 1, 101], [0, 100, 1, 101]],
[[0, 1000, 1, 1002], [0, 999, 2, 1004]],
- [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]])
+ [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
+ np.float32)
scores = np.array([[[.9, 0.01], [.75, 0.05],
[.6, 0.01], [.95, 0]],
[[.5, 0.01], [.3, 0.01],
- [.01, .85], [.01, .5]]])
+ [.01, .85], [.01, .5]]], np.float32)
masks = np.array([[[[[0, 1], [2, 3]], [[1, 2], [3, 4]]],
[[[2, 3], [4, 5]], [[3, 4], [5, 6]]],
[[[4, 5], [6, 7]], [[5, 6], [7, 8]]],
@@ -455,84 +443,9 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase,
[[[[8, 9], [10, 11]], [[9, 10], [11, 12]]],
[[[10, 11], [12, 13]], [[11, 12], [13, 14]]],
[[[12, 13], [14, 15]], [[13, 14], [15, 16]]],
- [[[14, 15], [16, 17]], [[15, 16], [17, 18]]]]])
- score_thresh = 0.1
- iou_thresh = .5
- max_output_size = 4
-
- exp_nms_corners = np.array([[[0, 10, 1, 11],
- [0, 0, 1, 1],
- [0, 0, 0, 0],
- [0, 0, 0, 0]],
- [[0, 999, 2, 1004],
- [0, 10.1, 1, 11.1],
- [0, 100, 1, 101],
- [0, 0, 0, 0]]])
- exp_nms_scores = np.array([[.95, .9, 0, 0],
- [.85, .5, .3, 0]])
- exp_nms_classes = np.array([[0, 0, 0, 0],
- [1, 0, 0, 0]])
- exp_nms_masks = np.array([[[[6, 7], [8, 9]],
- [[0, 1], [2, 3]],
- [[0, 0], [0, 0]],
- [[0, 0], [0, 0]]],
- [[[13, 14], [15, 16]],
- [[8, 9], [10, 11]],
- [[10, 11], [12, 13]],
- [[0, 0], [0, 0]]]])
-
- (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
- nmsed_additional_fields, num_detections
- ) = post_processing.batch_multiclass_non_max_suppression(
- boxes_placeholder, scores_placeholder, score_thresh, iou_thresh,
- max_size_per_class=max_output_size, max_total_size=max_output_size,
- masks=masks_placeholder)
-
- self.assertIsNone(nmsed_additional_fields)
- # Check static shapes
- self.assertAllEqual(nmsed_boxes.shape.as_list(), [None, 4, 4])
- self.assertAllEqual(nmsed_scores.shape.as_list(), [None, 4])
- self.assertAllEqual(nmsed_classes.shape.as_list(), [None, 4])
- self.assertAllEqual(nmsed_masks.shape.as_list(), [None, 4, 2, 2])
- self.assertEqual(num_detections.shape.as_list(), [None])
-
- with self.test_session() as sess:
- (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
- num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes,
- nmsed_masks, num_detections],
- feed_dict={boxes_placeholder: boxes,
- scores_placeholder: scores,
- masks_placeholder: masks})
- self.assertAllClose(nmsed_boxes, exp_nms_corners)
- self.assertAllClose(nmsed_scores, exp_nms_scores)
- self.assertAllClose(nmsed_classes, exp_nms_classes)
- self.assertAllClose(num_detections, [2, 3])
- self.assertAllClose(nmsed_masks, exp_nms_masks)
-
- def test_batch_multiclass_nms_with_masks_and_num_valid_boxes(self):
- boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]],
- [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
- [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
- [[0, 10, 1, 11], [0, 10, 1, 11]]],
- [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
- [[0, 100, 1, 101], [0, 100, 1, 101]],
- [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
- [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
- tf.float32)
- scores = tf.constant([[[.9, 0.01], [.75, 0.05],
- [.6, 0.01], [.95, 0]],
- [[.5, 0.01], [.3, 0.01],
- [.01, .85], [.01, .5]]])
- masks = tf.constant([[[[[0, 1], [2, 3]], [[1, 2], [3, 4]]],
- [[[2, 3], [4, 5]], [[3, 4], [5, 6]]],
- [[[4, 5], [6, 7]], [[5, 6], [7, 8]]],
- [[[6, 7], [8, 9]], [[7, 8], [9, 10]]]],
- [[[[8, 9], [10, 11]], [[9, 10], [11, 12]]],
- [[[10, 11], [12, 13]], [[11, 12], [13, 14]]],
- [[[12, 13], [14, 15]], [[13, 14], [15, 16]]],
- [[[14, 15], [16, 17]], [[15, 16], [17, 18]]]]],
- tf.float32)
- num_valid_boxes = tf.constant([1, 1], tf.int32)
+ [[[14, 15], [16, 17]], [[15, 16], [17, 18]]]]],
+ np.float32)
+ num_valid_boxes = np.array([1, 1], np.int32)
score_thresh = 0.1
iou_thresh = .5
max_output_size = 4
@@ -558,58 +471,56 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase,
[[0, 0], [0, 0]],
[[0, 0], [0, 0]]]]
- (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
- nmsed_additional_fields, num_detections
- ) = post_processing.batch_multiclass_non_max_suppression(
- boxes, scores, score_thresh, iou_thresh,
- max_size_per_class=max_output_size, max_total_size=max_output_size,
- num_valid_boxes=num_valid_boxes, masks=masks)
-
- self.assertIsNone(nmsed_additional_fields)
-
- with self.test_session() as sess:
+ def graph_fn(boxes, scores, masks, num_valid_boxes):
(nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
- num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes,
- nmsed_masks, num_detections])
- self.assertAllClose(nmsed_boxes, exp_nms_corners)
- self.assertAllClose(nmsed_scores, exp_nms_scores)
- self.assertAllClose(nmsed_classes, exp_nms_classes)
- self.assertAllClose(num_detections, [1, 1])
- self.assertAllClose(nmsed_masks, exp_nms_masks)
+ nmsed_additional_fields, num_detections
+ ) = post_processing.batch_multiclass_non_max_suppression(
+ boxes, scores, score_thresh, iou_thresh,
+ max_size_per_class=max_output_size, max_total_size=max_output_size,
+ masks=masks, num_valid_boxes=num_valid_boxes)
+ self.assertIsNone(nmsed_additional_fields)
+ return (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
+ num_detections)
+
+ (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
+ num_detections) = self.execute_cpu(graph_fn, [boxes, scores, masks,
+ num_valid_boxes])
+ self.assertAllClose(nmsed_boxes, exp_nms_corners)
+ self.assertAllClose(nmsed_scores, exp_nms_scores)
+ self.assertAllClose(nmsed_classes, exp_nms_classes)
+ self.assertAllClose(num_detections, [1, 1])
+ self.assertAllClose(nmsed_masks, exp_nms_masks)
def test_batch_multiclass_nms_with_additional_fields_and_num_valid_boxes(
self):
- boxes = tf.constant([[[[0, 0, 1, 1], [0, 0, 4, 5]],
- [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
- [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
- [[0, 10, 1, 11], [0, 10, 1, 11]]],
- [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
- [[0, 100, 1, 101], [0, 100, 1, 101]],
- [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
- [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
- tf.float32)
- scores = tf.constant([[[.9, 0.01], [.75, 0.05],
- [.6, 0.01], [.95, 0]],
- [[.5, 0.01], [.3, 0.01],
- [.01, .85], [.01, .5]]])
- additional_fields = {
- 'keypoints': tf.constant(
- [[[[6, 7], [8, 9]],
- [[0, 1], [2, 3]],
- [[0, 0], [0, 0]],
- [[0, 0], [0, 0]]],
- [[[13, 14], [15, 16]],
- [[8, 9], [10, 11]],
- [[10, 11], [12, 13]],
- [[0, 0], [0, 0]]]],
- tf.float32)
- }
-
- additional_fields['size'] = tf.constant(
+ boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]],
+ [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
+ [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
+ [[0, 10, 1, 11], [0, 10, 1, 11]]],
+ [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
+ [[0, 100, 1, 101], [0, 100, 1, 101]],
+ [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
+ [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
+ np.float32)
+ scores = np.array([[[.9, 0.01], [.75, 0.05],
+ [.6, 0.01], [.95, 0]],
+ [[.5, 0.01], [.3, 0.01],
+ [.01, .85], [.01, .5]]], np.float32)
+ keypoints = np.array(
+ [[[[6, 7], [8, 9]],
+ [[0, 1], [2, 3]],
+ [[0, 0], [0, 0]],
+ [[0, 0], [0, 0]]],
+ [[[13, 14], [15, 16]],
+ [[8, 9], [10, 11]],
+ [[10, 11], [12, 13]],
+ [[0, 0], [0, 0]]]],
+ np.float32)
+ size = np.array(
[[[[7], [9]], [[1], [3]], [[0], [0]], [[0], [0]]],
- [[[14], [16]], [[9], [11]], [[11], [13]], [[0], [0]]]], tf.float32)
+ [[[14], [16]], [[9], [11]], [[11], [13]], [[0], [0]]]], np.float32)
- num_valid_boxes = tf.constant([1, 1], tf.int32)
+ num_valid_boxes = np.array([1, 1], np.int32)
score_thresh = 0.1
iou_thresh = .5
max_output_size = 4
@@ -641,45 +552,48 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase,
[[0], [0]], [[0], [0]]],
[[[14], [16]], [[0], [0]],
[[0], [0]], [[0], [0]]]])
-
- (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
- nmsed_additional_fields, num_detections
- ) = post_processing.batch_multiclass_non_max_suppression(
- boxes, scores, score_thresh, iou_thresh,
- max_size_per_class=max_output_size, max_total_size=max_output_size,
- num_valid_boxes=num_valid_boxes,
- additional_fields=additional_fields)
-
- self.assertIsNone(nmsed_masks)
-
- with self.test_session() as sess:
- (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_additional_fields,
- num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes,
- nmsed_additional_fields, num_detections])
-
- self.assertAllClose(nmsed_boxes, exp_nms_corners)
- self.assertAllClose(nmsed_scores, exp_nms_scores)
- self.assertAllClose(nmsed_classes, exp_nms_classes)
- for key in exp_nms_additional_fields:
- self.assertAllClose(nmsed_additional_fields[key],
- exp_nms_additional_fields[key])
- self.assertAllClose(num_detections, [1, 1])
+ def graph_fn(boxes, scores, keypoints, size, num_valid_boxes):
+ additional_fields = {'keypoints': keypoints, 'size': size}
+ (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
+ nmsed_additional_fields, num_detections
+ ) = post_processing.batch_multiclass_non_max_suppression(
+ boxes, scores, score_thresh, iou_thresh,
+ max_size_per_class=max_output_size, max_total_size=max_output_size,
+ num_valid_boxes=num_valid_boxes,
+ additional_fields=additional_fields)
+ self.assertIsNone(nmsed_masks)
+ return (nmsed_boxes, nmsed_scores, nmsed_classes,
+ nmsed_additional_fields['keypoints'],
+ nmsed_additional_fields['size'], num_detections)
+
+ (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_keypoints, nmsed_size,
+ num_detections) = self.execute_cpu(graph_fn, [boxes, scores, keypoints,
+ size, num_valid_boxes])
+
+ self.assertAllClose(nmsed_boxes, exp_nms_corners)
+ self.assertAllClose(nmsed_scores, exp_nms_scores)
+ self.assertAllClose(nmsed_classes, exp_nms_classes)
+ self.assertAllClose(nmsed_keypoints,
+ exp_nms_additional_fields['keypoints'])
+ self.assertAllClose(nmsed_size,
+ exp_nms_additional_fields['size'])
+ self.assertAllClose(num_detections, [1, 1])
def test_combined_nms_with_batch_size_2(self):
"""Test use_combined_nms."""
- boxes = tf.constant([[[[0, 0, 0.1, 0.1], [0, 0, 0.1, 0.1]],
- [[0, 0.01, 1, 0.11], [0, 0.6, 0.1, 0.7]],
- [[0, -0.01, 0.1, 0.09], [0, -0.1, 0.1, 0.09]],
- [[0, 0.11, 0.1, 0.2], [0, 0.11, 0.1, 0.2]]],
- [[[0, 0, 0.2, 0.2], [0, 0, 0.2, 0.2]],
- [[0, 0.02, 0.2, 0.22], [0, 0.02, 0.2, 0.22]],
- [[0, -0.02, 0.2, 0.19], [0, -0.02, 0.2, 0.19]],
- [[0, 0.21, 0.2, 0.3], [0, 0.21, 0.2, 0.3]]]],
- tf.float32)
- scores = tf.constant([[[.1, 0.9], [.75, 0.8],
- [.6, 0.3], [0.95, 0.1]],
- [[.1, 0.9], [.75, 0.8],
- [.6, .3], [.95, .1]]])
+ boxes = np.array([[[[0, 0, 0.1, 0.1], [0, 0, 0.1, 0.1]],
+ [[0, 0.01, 1, 0.11], [0, 0.6, 0.1, 0.7]],
+ [[0, -0.01, 0.1, 0.09], [0, -0.1, 0.1, 0.09]],
+ [[0, 0.11, 0.1, 0.2], [0, 0.11, 0.1, 0.2]]],
+ [[[0, 0, 0.2, 0.2], [0, 0, 0.2, 0.2]],
+ [[0, 0.02, 0.2, 0.22], [0, 0.02, 0.2, 0.22]],
+ [[0, -0.02, 0.2, 0.19], [0, -0.02, 0.2, 0.19]],
+ [[0, 0.21, 0.2, 0.3], [0, 0.21, 0.2, 0.3]]]],
+ np.float32)
+ scores = np.array([[[.1, 0.9], [.75, 0.8],
+ [.6, 0.3], [0.95, 0.1]],
+ [[.1, 0.9], [.75, 0.8],
+ [.6, .3], [.95, .1]]], np.float32)
score_thresh = 0.1
iou_thresh = .5
max_output_size = 3
@@ -695,27 +609,78 @@ class BatchMulticlassNonMaxSuppressionTest(test_case.TestCase,
exp_nms_classes = np.array([[0, 1, 1],
[0, 1, 0]])
- (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
- nmsed_additional_fields, num_detections
- ) = post_processing.batch_multiclass_non_max_suppression(
- boxes, scores, score_thresh, iou_thresh,
- max_size_per_class=max_output_size, max_total_size=max_output_size,
- use_static_shapes=True,
- use_combined_nms=True)
-
- self.assertIsNone(nmsed_masks)
- self.assertIsNone(nmsed_additional_fields)
-
- with self.test_session() as sess:
- (nmsed_boxes, nmsed_scores, nmsed_classes,
- num_detections) = sess.run([nmsed_boxes, nmsed_scores, nmsed_classes,
- num_detections])
- self.assertAllClose(nmsed_boxes, exp_nms_corners)
- self.assertAllClose(nmsed_scores, exp_nms_scores)
- self.assertAllClose(nmsed_classes, exp_nms_classes)
- self.assertListEqual(num_detections.tolist(), [3, 3])
-
- # TODO(bhattad): Remove conditional after CMLE moves to TF 1.9
+ def graph_fn(boxes, scores):
+ (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks,
+ nmsed_additional_fields, num_detections
+ ) = post_processing.batch_multiclass_non_max_suppression(
+ boxes, scores, score_thresh, iou_thresh,
+ max_size_per_class=max_output_size, max_total_size=max_output_size,
+ use_static_shapes=True,
+ use_combined_nms=True)
+ self.assertIsNone(nmsed_masks)
+ self.assertIsNone(nmsed_additional_fields)
+ return (nmsed_boxes, nmsed_scores, nmsed_classes, num_detections)
+
+ (nmsed_boxes, nmsed_scores, nmsed_classes,
+ num_detections) = self.execute_cpu(graph_fn, [boxes, scores])
+ self.assertAllClose(nmsed_boxes, exp_nms_corners)
+ self.assertAllClose(nmsed_scores, exp_nms_scores)
+ self.assertAllClose(nmsed_classes, exp_nms_classes)
+ self.assertListEqual(num_detections.tolist(), [3, 3])
+
+ def test_batch_multiclass_nms_with_use_static_shapes(self):
+ boxes = np.array([[[[0, 0, 1, 1], [0, 0, 4, 5]],
+ [[0, 0.1, 1, 1.1], [0, 0.1, 2, 1.1]],
+ [[0, -0.1, 1, 0.9], [0, -0.1, 1, 0.9]],
+ [[0, 10, 1, 11], [0, 10, 1, 11]]],
+ [[[0, 10.1, 1, 11.1], [0, 10.1, 1, 11.1]],
+ [[0, 100, 1, 101], [0, 100, 1, 101]],
+ [[0, 1000, 1, 1002], [0, 999, 2, 1004]],
+ [[0, 1000, 1, 1002.1], [0, 999, 2, 1002.7]]]],
+ np.float32)
+ scores = np.array([[[.9, 0.01], [.75, 0.05],
+ [.6, 0.01], [.95, 0]],
+ [[.5, 0.01], [.3, 0.01],
+ [.01, .85], [.01, .5]]],
+ np.float32)
+ clip_window = np.array([[0., 0., 5., 5.],
+ [0., 0., 200., 200.]],
+ np.float32)
+ score_thresh = 0.1
+ iou_thresh = .5
+ max_output_size = 4
+
+ exp_nms_corners = np.array([[[0, 0, 1, 1],
+ [0, 0, 0, 0],
+ [0, 0, 0, 0],
+ [0, 0, 0, 0]],
+ [[0, 10.1, 1, 11.1],
+ [0, 100, 1, 101],
+ [0, 0, 0, 0],
+ [0, 0, 0, 0]]])
+ exp_nms_scores = np.array([[.9, 0., 0., 0.],
+ [.5, .3, 0, 0]])
+ exp_nms_classes = np.array([[0, 0, 0, 0],
+ [0, 0, 0, 0]])
+
+ def graph_fn(boxes, scores, clip_window):
+ (nmsed_boxes, nmsed_scores, nmsed_classes, _, _, num_detections
+ ) = post_processing.batch_multiclass_non_max_suppression(
+ boxes, scores, score_thresh, iou_thresh,
+ max_size_per_class=max_output_size, clip_window=clip_window,
+ use_static_shapes=True)
+ return nmsed_boxes, nmsed_scores, nmsed_classes, num_detections
+
+ (nmsed_boxes, nmsed_scores, nmsed_classes,
+ num_detections) = self.execute(graph_fn, [boxes, scores, clip_window])
+ for i in range(len(num_detections)):
+ self.assertAllClose(nmsed_boxes[i, 0:num_detections[i]],
+ exp_nms_corners[i, 0:num_detections[i]])
+ self.assertAllClose(nmsed_scores[i, 0:num_detections[i]],
+ exp_nms_scores[i, 0:num_detections[i]])
+ self.assertAllClose(nmsed_classes[i, 0:num_detections[i]],
+ exp_nms_classes[i, 0:num_detections[i]])
+ self.assertAllClose(num_detections, [1, 2])
if __name__ == '__main__':
tf.test.main()
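
The test refactor above replaces session-based assertions with a graph_fn handed to self.execute_cpu, so the same test body can run under TF1 graph mode and TF2 eager execution. A minimal sketch of that pattern follows, assuming a simplified TF2-only helper; execute_cpu_sketch and the toy graph_fn are illustrative names, while the real helper lives in object_detection.utils.test_case and also handles TF1 sessions and device placement.

import numpy as np
import tensorflow as tf


def execute_cpu_sketch(graph_fn, inputs):
  """Builds graph_fn's ops on CPU and returns numpy outputs (TF2 eager only)."""
  with tf.device('/cpu:0'):
    tensors = [tf.constant(a) for a in inputs]
    outputs = graph_fn(*tensors)
  if isinstance(outputs, (list, tuple)):
    return [np.asarray(t) for t in outputs]
  return np.asarray(outputs)


# Usage mirrors the rewritten tests: inputs are plain numpy arrays, graph_fn
# builds the ops, and assertions run on the returned numpy values.
def graph_fn(boxes, scores):
  return tf.reduce_max(scores, axis=-1)

max_scores = execute_cpu_sketch(
    graph_fn,
    [np.zeros((1, 4, 4), np.float32), np.ones((1, 4, 2), np.float32)])
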
diff --git a/research/object_detection/core/batcher.py b/research/object_detection/core/batcher.py
index 832e22420d0cd5d3081805ff4cf0f7ae05c20f9e..26832e30efa43a15436070e8676b1d020712a794 100644
--- a/research/object_detection/core/batcher.py
+++ b/research/object_detection/core/batcher.py
@@ -24,10 +24,6 @@ from six.moves import range
import tensorflow.compat.v1 as tf
from object_detection.core import prefetcher
-from object_detection.utils import tf_version
-
-if not tf_version.is_tf1():
- raise ValueError('`batcher.py` is only supported in Tensorflow 1.X')
rt_shape_str = '_runtime_shapes'
diff --git a/research/object_detection/core/batcher_tf1_test.py b/research/object_detection/core/batcher_tf1_test.py
index 8f443a942c2af67650de67d2d0583df07e6e9e0e..1688b87cdf08bc29ddb2413776757066047c80da 100644
--- a/research/object_detection/core/batcher_tf1_test.py
+++ b/research/object_detection/core/batcher_tf1_test.py
@@ -19,14 +19,17 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+import unittest
import numpy as np
from six.moves import range
import tensorflow.compat.v1 as tf
import tf_slim as slim
from object_detection.core import batcher
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class BatcherTest(tf.test.TestCase):
def test_batch_and_unpad_2d_tensors_of_different_sizes_in_1st_dimension(self):
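
With the import-time ValueError removed from batcher.py (and prefetcher.py below), TF1-only behaviour is now gated at the test level instead. A hedged sketch of that gating pattern, using a stand-in version check rather than the repo's tf_version.is_tf2():

import unittest
import tensorflow as tf


def is_tf2_sketch():
  # Stand-in for object_detection.utils.tf_version.is_tf2().
  return tf.__version__.startswith('2')


@unittest.skipIf(is_tf2_sketch(), 'Skipping TF1.X only test.')
class LegacyQueueTest(tf.test.TestCase):

  def test_queue_based_prefetching(self):
    # The real body would exercise queue-based prefetching, which is TF1-only.
    self.assertTrue(True)


if __name__ == '__main__':
  tf.test.main()
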
diff --git a/research/object_detection/core/freezable_batch_norm_test.py b/research/object_detection/core/freezable_batch_norm_tf2_test.py
similarity index 98%
rename from research/object_detection/core/freezable_batch_norm_test.py
rename to research/object_detection/core/freezable_batch_norm_tf2_test.py
index 8379a38398414e89c611f2247209fb087fa9be31..4cc42ae3ef7da9b3412d2f461d7f9db62420e603 100644
--- a/research/object_detection/core/freezable_batch_norm_test.py
+++ b/research/object_detection/core/freezable_batch_norm_tf2_test.py
@@ -17,15 +17,17 @@
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
-
+import unittest
import numpy as np
from six.moves import zip
import tensorflow.compat.v1 as tf
from object_detection.core import freezable_batch_norm
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class FreezableBatchNormTest(tf.test.TestCase):
"""Tests for FreezableBatchNorm operations."""
diff --git a/research/object_detection/core/keypoint_ops.py b/research/object_detection/core/keypoint_ops.py
index e321783d986b3c330300f347158c261a7e3f94a6..1b0c4ccfed42aae492550331e870173c624f0316 100644
--- a/research/object_detection/core/keypoint_ops.py
+++ b/research/object_detection/core/keypoint_ops.py
@@ -217,7 +217,7 @@ def to_absolute_coordinates(keypoints, height, width,
return scale(keypoints, height, width)
-def flip_horizontal(keypoints, flip_point, flip_permutation, scope=None):
+def flip_horizontal(keypoints, flip_point, flip_permutation=None, scope=None):
"""Flips the keypoints horizontally around the flip_point.
This operation flips the x coordinate for each keypoint around the flip_point
@@ -227,13 +227,14 @@ def flip_horizontal(keypoints, flip_point, flip_permutation, scope=None):
keypoints: a tensor of shape [num_instances, num_keypoints, 2]
flip_point: (float) scalar tensor representing the x coordinate to flip the
keypoints around.
- flip_permutation: rank 1 int32 tensor containing the keypoint flip
- permutation. This specifies the mapping from original keypoint indices
- to the flipped keypoint indices. This is used primarily for keypoints
- that are not reflection invariant. E.g. Suppose there are 3 keypoints
- representing ['head', 'right_eye', 'left_eye'], then a logical choice for
- flip_permutation might be [0, 2, 1] since we want to swap the 'left_eye'
- and 'right_eye' after a horizontal flip.
+ flip_permutation: integer list or rank 1 int32 tensor containing the
+ keypoint flip permutation. This specifies the mapping from original
+ keypoint indices to the flipped keypoint indices. This is used primarily
+ for keypoints that are not reflection invariant. E.g. Suppose there are 3
+ keypoints representing ['head', 'right_eye', 'left_eye'], then a logical
+ choice for flip_permutation might be [0, 2, 1] since we want to swap the
+ 'left_eye' and 'right_eye' after a horizontal flip.
+ Default to None or empty list to keep the original order after flip.
scope: name scope.
Returns:
@@ -241,7 +242,8 @@ def flip_horizontal(keypoints, flip_point, flip_permutation, scope=None):
"""
with tf.name_scope(scope, 'FlipHorizontal'):
keypoints = tf.transpose(keypoints, [1, 0, 2])
- keypoints = tf.gather(keypoints, flip_permutation)
+ if flip_permutation:
+ keypoints = tf.gather(keypoints, flip_permutation)
v, u = tf.split(value=keypoints, num_or_size_splits=2, axis=2)
u = flip_point * 2.0 - u
new_keypoints = tf.concat([v, u], 2)
@@ -249,7 +251,7 @@ def flip_horizontal(keypoints, flip_point, flip_permutation, scope=None):
return new_keypoints
-def flip_vertical(keypoints, flip_point, flip_permutation, scope=None):
+def flip_vertical(keypoints, flip_point, flip_permutation=None, scope=None):
"""Flips the keypoints vertically around the flip_point.
This operation flips the y coordinate for each keypoint around the flip_point
@@ -259,13 +261,14 @@ def flip_vertical(keypoints, flip_point, flip_permutation, scope=None):
keypoints: a tensor of shape [num_instances, num_keypoints, 2]
flip_point: (float) scalar tensor representing the y coordinate to flip the
keypoints around.
- flip_permutation: rank 1 int32 tensor containing the keypoint flip
- permutation. This specifies the mapping from original keypoint indices
- to the flipped keypoint indices. This is used primarily for keypoints
- that are not reflection invariant. E.g. Suppose there are 3 keypoints
- representing ['head', 'right_eye', 'left_eye'], then a logical choice for
- flip_permutation might be [0, 2, 1] since we want to swap the 'left_eye'
- and 'right_eye' after a horizontal flip.
+ flip_permutation: integer list or rank 1 int32 tensor containing the
+ keypoint flip permutation. This specifies the mapping from original
+ keypoint indices to the flipped keypoint indices. This is used primarily
+ for keypoints that are not reflection invariant. E.g. Suppose there are 3
+ keypoints representing ['head', 'right_eye', 'left_eye'], then a logical
+ choice for flip_permutation might be [0, 2, 1] since we want to swap the
+ 'left_eye' and 'right_eye' after a horizontal flip.
+ Default to None or empty list to keep the original order after flip.
scope: name scope.
Returns:
@@ -273,7 +276,8 @@ def flip_vertical(keypoints, flip_point, flip_permutation, scope=None):
"""
with tf.name_scope(scope, 'FlipVertical'):
keypoints = tf.transpose(keypoints, [1, 0, 2])
- keypoints = tf.gather(keypoints, flip_permutation)
+ if flip_permutation:
+ keypoints = tf.gather(keypoints, flip_permutation)
v, u = tf.split(value=keypoints, num_or_size_splits=2, axis=2)
v = flip_point * 2.0 - v
new_keypoints = tf.concat([v, u], 2)
@@ -281,18 +285,24 @@ def flip_vertical(keypoints, flip_point, flip_permutation, scope=None):
return new_keypoints
-def rot90(keypoints, scope=None):
+def rot90(keypoints, rotation_permutation=None, scope=None):
"""Rotates the keypoints counter-clockwise by 90 degrees.
Args:
keypoints: a tensor of shape [num_instances, num_keypoints, 2]
+ rotation_permutation: integer list or rank 1 int32 tensor containing the
+      keypoint rotation permutation. This specifies the mapping from original
+ keypoint indices to the rotated keypoint indices. This is used primarily
+ for keypoints that are not rotation invariant.
+ Default to None or empty list to keep the original order after rotation.
scope: name scope.
-
Returns:
new_keypoints: a tensor of shape [num_instances, num_keypoints, 2]
"""
with tf.name_scope(scope, 'Rot90'):
keypoints = tf.transpose(keypoints, [1, 0, 2])
+ if rotation_permutation:
+ keypoints = tf.gather(keypoints, rotation_permutation)
v, u = tf.split(value=keypoints[:, :, ::-1], num_or_size_splits=2, axis=2)
v = 1.0 - v
new_keypoints = tf.concat([v, u], 2)
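
The permutation argument maps original keypoint indices to their post-flip indices, e.g. [0, 2, 1] swaps 'right_eye' and 'left_eye' as in the docstring example. A small numpy illustration of what flip_horizontal computes (keypoints are (y, x) pairs; the values below are made up for the example and are not from the library):

import numpy as np

keypoints = np.array([[[0.1, 0.1],    # head
                       [0.2, 0.2],    # right_eye
                       [0.3, 0.3]]],  # left_eye
                     np.float32)
flip_point = 0.5
flip_permutation = [0, 2, 1]

# 1) Re-index so the semantic labels still line up after mirroring.
flipped = keypoints[:, flip_permutation, :]
# 2) Mirror the x coordinate around flip_point.
flipped[..., 1] = flip_point * 2.0 - flipped[..., 1]
# flipped == [[[0.1, 0.9], [0.3, 0.7], [0.2, 0.8]]]
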
diff --git a/research/object_detection/core/keypoint_ops_test.py b/research/object_detection/core/keypoint_ops_test.py
index 695e8fa1c6efcac8900577cd4657393b01d6d8d1..bbdcf01940dcaf96da283bd6bcf73e91b633f0ee 100644
--- a/research/object_detection/core/keypoint_ops_test.py
+++ b/research/object_detection/core/keypoint_ops_test.py
@@ -180,6 +180,21 @@ class KeypointOpsTest(test_case.TestCase):
[[0.1, 0.1], [0.2, 0.2], [0.3, 0.3]],
[[0.4, 0.4], [0.5, 0.5], [0.6, 0.6]]
])
+ expected_keypoints = tf.constant([
+ [[0.1, 0.9], [0.2, 0.8], [0.3, 0.7]],
+ [[0.4, 0.6], [0.5, 0.5], [0.6, 0.4]],
+ ])
+ output = keypoint_ops.flip_horizontal(keypoints, 0.5)
+ return output, expected_keypoints
+
+ output, expected_keypoints = self.execute(graph_fn, [])
+ self.assertAllClose(output, expected_keypoints)
+
+ def test_flip_horizontal_permutation(self):
+
+ def graph_fn():
+ keypoints = tf.constant([[[0.1, 0.1], [0.2, 0.2], [0.3, 0.3]],
+ [[0.4, 0.4], [0.5, 0.5], [0.6, 0.6]]])
flip_permutation = [0, 2, 1]
expected_keypoints = tf.constant([
@@ -197,6 +212,22 @@ class KeypointOpsTest(test_case.TestCase):
[[0.1, 0.1], [0.2, 0.2], [0.3, 0.3]],
[[0.4, 0.4], [0.5, 0.5], [0.6, 0.6]]
])
+
+ expected_keypoints = tf.constant([
+ [[0.9, 0.1], [0.8, 0.2], [0.7, 0.3]],
+ [[0.6, 0.4], [0.5, 0.5], [0.4, 0.6]],
+ ])
+ output = keypoint_ops.flip_vertical(keypoints, 0.5)
+ return output, expected_keypoints
+
+ output, expected_keypoints = self.execute(graph_fn, [])
+ self.assertAllClose(output, expected_keypoints)
+
+ def test_flip_vertical_permutation(self):
+
+ def graph_fn():
+ keypoints = tf.constant([[[0.1, 0.1], [0.2, 0.2], [0.3, 0.3]],
+ [[0.4, 0.4], [0.5, 0.5], [0.6, 0.6]]])
flip_permutation = [0, 2, 1]
expected_keypoints = tf.constant([
@@ -223,6 +254,23 @@ class KeypointOpsTest(test_case.TestCase):
output, expected_keypoints = self.execute(graph_fn, [])
self.assertAllClose(output, expected_keypoints)
+ def test_rot90_permutation(self):
+
+ def graph_fn():
+ keypoints = tf.constant([[[0.1, 0.1], [0.2, 0.2], [0.3, 0.3]],
+ [[0.4, 0.6], [0.5, 0.6], [0.6, 0.7]]])
+ rot_permutation = [0, 2, 1]
+ expected_keypoints = tf.constant([
+ [[0.9, 0.1], [0.7, 0.3], [0.8, 0.2]],
+ [[0.4, 0.4], [0.3, 0.6], [0.4, 0.5]],
+ ])
+ output = keypoint_ops.rot90(keypoints,
+ rotation_permutation=rot_permutation)
+ return output, expected_keypoints
+
+ output, expected_keypoints = self.execute(graph_fn, [])
+ self.assertAllClose(output, expected_keypoints)
+
def test_keypoint_weights_from_visibilities(self):
def graph_fn():
keypoint_visibilities = tf.constant([
diff --git a/research/object_detection/core/losses.py b/research/object_detection/core/losses.py
index 07e7dd3ff4c6d69d32ac11a305551fb716262eb2..c4d499e7e6c4ed5da803c48ff3d8908e713a3c2e 100644
--- a/research/object_detection/core/losses.py
+++ b/research/object_detection/core/losses.py
@@ -681,3 +681,95 @@ class HardExampleMiner(object):
num_positives, num_negatives)
+class PenaltyReducedLogisticFocalLoss(Loss):
+ """Penalty-reduced pixelwise logistic regression with focal loss.
+
+ The loss is defined in Equation (1) of the Objects as Points[1] paper.
+ Although the loss is defined per-pixel in the output space, this class
+ assumes that each pixel is an anchor to be compatible with the base class.
+
+ [1]: https://arxiv.org/abs/1904.07850
+ """
+
+ def __init__(self, alpha=2.0, beta=4.0, sigmoid_clip_value=1e-4):
+ """Constructor.
+
+ Args:
+ alpha: Focussing parameter of the focal loss. Increasing this will
+ decrease the loss contribution of the well classified examples.
+ beta: The local penalty reduction factor. Increasing this will decrease
+ the contribution of loss due to negative pixels near the keypoint.
+ sigmoid_clip_value: The sigmoid operation used internally will be clipped
+        between [sigmoid_clip_value, 1 - sigmoid_clip_value].
+ """
+ self._alpha = alpha
+ self._beta = beta
+ self._sigmoid_clip_value = sigmoid_clip_value
+ super(PenaltyReducedLogisticFocalLoss, self).__init__()
+
+ def _compute_loss(self, prediction_tensor, target_tensor, weights):
+ """Compute loss function.
+
+ In all input tensors, `num_anchors` is the total number of pixels in the
+    output space.
+
+ Args:
+ prediction_tensor: A float tensor of shape [batch_size, num_anchors,
+ num_classes] representing the predicted unscaled logits for each class.
+ The function will compute sigmoid on this tensor internally.
+ target_tensor: A float tensor of shape [batch_size, num_anchors,
+ num_classes] representing a tensor with the 'splatted' keypoints,
+ possibly using a gaussian kernel. This function assumes that
+ the target is bounded between [0, 1].
+ weights: a float tensor of shape, either [batch_size, num_anchors,
+ num_classes] or [batch_size, num_anchors, 1]. If the shape is
+        [batch_size, num_anchors, 1], all the classes are equally weighted.
+
+
+ Returns:
+ loss: a float tensor of shape [batch_size, num_anchors, num_classes]
+ representing the value of the loss function.
+ """
+
+ is_present_tensor = tf.math.equal(target_tensor, 1.0)
+ prediction_tensor = tf.clip_by_value(tf.sigmoid(prediction_tensor),
+ self._sigmoid_clip_value,
+ 1 - self._sigmoid_clip_value)
+
+ positive_loss = (tf.math.pow((1 - prediction_tensor), self._alpha)*
+ tf.math.log(prediction_tensor))
+ negative_loss = (tf.math.pow((1 - target_tensor), self._beta)*
+ tf.math.pow(prediction_tensor, self._alpha)*
+ tf.math.log(1 - prediction_tensor))
+
+ loss = -tf.where(is_present_tensor, positive_loss, negative_loss)
+ return loss * weights
+
+
+class L1LocalizationLoss(Loss):
+ """L1 loss or absolute difference.
+
+ When used in a per-pixel manner, each pixel should be given as an anchor.
+ """
+
+ def _compute_loss(self, prediction_tensor, target_tensor, weights):
+ """Compute loss function.
+
+ Args:
+ prediction_tensor: A float tensor of shape [batch_size, num_anchors]
+ representing the (encoded) predicted locations of objects.
+ target_tensor: A float tensor of shape [batch_size, num_anchors]
+ representing the regression targets
+ weights: a float tensor of shape [batch_size, num_anchors]
+
+ Returns:
+ loss: a float tensor of shape [batch_size, num_anchors] tensor
+ representing the value of the loss function.
+ """
+ return tf.losses.absolute_difference(
+ target_tensor,
+ prediction_tensor,
+ weights=weights,
+ loss_collection=None,
+ reduction=tf.losses.Reduction.NONE
+ )
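
A hedged numpy restatement of the penalty-reduced focal loss added above (Equation (1) of Objects as Points, with the defaults alpha=2, beta=4), just to make the positive/negative branches concrete; the function name and toy values are illustrative only.

import numpy as np

def penalty_reduced_focal_loss(logits, targets, alpha=2.0, beta=4.0, eps=1e-4):
  p = np.clip(1.0 / (1.0 + np.exp(-logits)), eps, 1.0 - eps)
  positive = np.power(1.0 - p, alpha) * np.log(p)            # where target == 1
  negative = (np.power(1.0 - targets, beta) *
              np.power(p, alpha) * np.log(1.0 - p))          # everywhere else
  return -np.where(targets == 1.0, positive, negative)

logits = np.array([4.0, -2.0, 0.0])
targets = np.array([1.0, 0.0, 0.8])  # 0.8: a pixel near, but not at, a keypoint
print(penalty_reduced_focal_loss(logits, targets))
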
diff --git a/research/object_detection/core/model.py b/research/object_detection/core/model.py
index 0430b37b5c31c6e1ce9604898aaa8e73319400f8..437ed08e1f7e1ddcc053085010ea7be5378e7be9 100644
--- a/research/object_detection/core/model.py
+++ b/research/object_detection/core/model.py
@@ -391,7 +391,9 @@ class DetectionModel(six.with_metaclass(abc.ABCMeta, _BaseClass)):
pass
@abc.abstractmethod
- def restore_map(self, fine_tune_checkpoint_type='detection'):
+ def restore_map(self,
+ fine_tune_checkpoint_type='detection',
+ load_all_detection_checkpoint_vars=False):
"""Returns a map of variables to load from a foreign checkpoint.
Returns a map of variable names to load from a checkpoint to variables in
@@ -407,6 +409,9 @@ class DetectionModel(six.with_metaclass(abc.ABCMeta, _BaseClass)):
checkpoint (with compatible variable names) or to restore from a
classification checkpoint for initialization prior to training.
Valid values: `detection`, `classification`. Default 'detection'.
+ load_all_detection_checkpoint_vars: whether to load all variables (when
+ `fine_tune_checkpoint_type` is `detection`). If False, only variables
+ within the feature extractor scope are included. Default False.
Returns:
A dict mapping variable names (to load from a checkpoint) to variables in
@@ -414,6 +419,36 @@ class DetectionModel(six.with_metaclass(abc.ABCMeta, _BaseClass)):
"""
pass
+ @abc.abstractmethod
+ def restore_from_objects(self, fine_tune_checkpoint_type='detection'):
+ """Returns a map of variables to load from a foreign checkpoint.
+
+ Returns a dictionary of Tensorflow 2 Trackable objects (e.g. tf.Module
+ or Checkpoint). This enables the model to initialize based on weights from
+ another task. For example, the feature extractor variables from a
+ classification model can be used to bootstrap training of an object
+ detector. When loading from an object detection model, the checkpoint model
+ should have the same parameters as this detection model with exception of
+    should have the same parameters as this detection model, with the
+    exception of the num_classes parameter.
+ Note that this function is intended to be used to restore Keras-based
+ models when running Tensorflow 2, whereas restore_map (above) is intended
+ to be used to restore Slim-based models when running Tensorflow 1.x.
+
+ TODO(jonathanhuang,rathodv): Check tf_version and raise unimplemented
+ error for both restore_map and restore_from_objects depending on version.
+
+ Args:
+ fine_tune_checkpoint_type: whether to restore from a full detection
+ checkpoint (with compatible variable names) or to restore from a
+ classification checkpoint for initialization prior to training.
+ Valid values: `detection`, `classification`. Default 'detection'.
+
+ Returns:
+ A dict mapping keys to Trackable objects (tf.Module or Checkpoint).
+ """
+ pass
+
@abc.abstractmethod
def updates(self):
"""Returns a list of update operators for this model.
diff --git a/research/object_detection/core/model_test.py b/research/object_detection/core/model_test.py
index 2bb1ab343a6634ffc8df9f71378e83371921da7a..fcc36c03d4a77a78193975766b5e96b37a32b075 100644
--- a/research/object_detection/core/model_test.py
+++ b/research/object_detection/core/model_test.py
@@ -57,6 +57,9 @@ class FakeModel(model.DetectionModel):
def restore_map(self):
return {}
+ def restore_from_objects(self, fine_tune_checkpoint_type):
+ pass
+
def regularization_losses(self):
return []
diff --git a/research/object_detection/core/prefetcher.py b/research/object_detection/core/prefetcher.py
index f88fbbd39258b0cc79b5ff2fb6bbad8f4373abdf..31e93eae80e25abde3166a56d212645ed4f17a5a 100644
--- a/research/object_detection/core/prefetcher.py
+++ b/research/object_detection/core/prefetcher.py
@@ -16,10 +16,6 @@
"""Provides functions to prefetch tensors to feed into models."""
import tensorflow.compat.v1 as tf
-from object_detection.utils import tf_version
-if not tf_version.is_tf1():
- raise ValueError('`prefetcher.py` is only supported in Tensorflow 1.X')
-
def prefetch(tensor_dict, capacity):
"""Creates a prefetch queue for tensors.
diff --git a/research/object_detection/core/prefetcher_tf1_test.py b/research/object_detection/core/prefetcher_tf1_test.py
index 3c827d8000e5d74a05c37a637aa6f7013e3e1cee..95e9155e5e38c762cee915389f55f0cc69334ae9 100644
--- a/research/object_detection/core/prefetcher_tf1_test.py
+++ b/research/object_detection/core/prefetcher_tf1_test.py
@@ -18,16 +18,16 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+import unittest
from six.moves import range
import tensorflow.compat.v1 as tf
-
-# pylint: disable=g-bad-import-order,
-from object_detection.core import prefetcher
import tf_slim as slim
-# pylint: disable=g-bad-import-order
+from object_detection.core import prefetcher
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class PrefetcherTest(tf.test.TestCase):
"""Test class for prefetcher."""
diff --git a/research/object_detection/core/preprocessor.py b/research/object_detection/core/preprocessor.py
index 8b8fdff5e5446f0739396eafd10b4b5d39bd14b5..a1e7ed0288692d8ad8aeb852ebef00462c3a91cd 100644
--- a/research/object_detection/core/preprocessor.py
+++ b/research/object_detection/core/preprocessor.py
@@ -569,12 +569,11 @@ def random_horizontal_flip(image,
keypoints=None,
keypoint_visibilities=None,
keypoint_flip_permutation=None,
+ probability=0.5,
seed=None,
preprocess_vars_cache=None):
"""Randomly flips the image and detections horizontally.
- The probability of flipping the image is 50%.
-
Args:
image: rank 3 float32 tensor with shape [height, width, channels].
boxes: (optional) rank 2 float32 tensor with shape [N, 4]
@@ -592,6 +591,7 @@ def random_horizontal_flip(image,
[num_instances, num_keypoints].
keypoint_flip_permutation: rank 1 int32 tensor containing the keypoint flip
permutation.
+ probability: the probability of performing this augmentation.
seed: random seed
preprocess_vars_cache: PreprocessorCache object that records previously
performed augmentations. Updated in-place. If this
@@ -636,7 +636,7 @@ def random_horizontal_flip(image,
generator_func,
preprocessor_cache.PreprocessorCache.HORIZONTAL_FLIP,
preprocess_vars_cache)
- do_a_flip_random = tf.greater(do_a_flip_random, 0.5)
+ do_a_flip_random = tf.less(do_a_flip_random, probability)
# flip image
image = tf.cond(do_a_flip_random, lambda: _flip_image(image), lambda: image)
@@ -682,6 +682,7 @@ def random_vertical_flip(image,
masks=None,
keypoints=None,
keypoint_flip_permutation=None,
+ probability=0.5,
seed=None,
preprocess_vars_cache=None):
"""Randomly flips the image and detections vertically.
@@ -703,6 +704,7 @@ def random_vertical_flip(image,
normalized coordinates.
keypoint_flip_permutation: rank 1 int32 tensor containing the keypoint flip
permutation.
+ probability: the probability of performing this augmentation.
seed: random seed
preprocess_vars_cache: PreprocessorCache object that records previously
performed augmentations. Updated in-place. If this
@@ -743,7 +745,7 @@ def random_vertical_flip(image,
do_a_flip_random = _get_or_create_preprocess_rand_vars(
generator_func, preprocessor_cache.PreprocessorCache.VERTICAL_FLIP,
preprocess_vars_cache)
- do_a_flip_random = tf.greater(do_a_flip_random, 0.5)
+ do_a_flip_random = tf.less(do_a_flip_random, probability)
# flip image
image = tf.cond(do_a_flip_random, lambda: _flip_image(image), lambda: image)
@@ -777,6 +779,8 @@ def random_rotation90(image,
boxes=None,
masks=None,
keypoints=None,
+ keypoint_rot_permutation=None,
+ probability=0.5,
seed=None,
preprocess_vars_cache=None):
"""Randomly rotates the image and detections 90 degrees counter-clockwise.
@@ -799,6 +803,9 @@ def random_rotation90(image,
keypoints: (optional) rank 3 float32 tensor with shape
[num_instances, num_keypoints, 2]. The keypoints are in y-x
normalized coordinates.
+    keypoint_rot_permutation: rank 1 int32 tensor containing the keypoint
+      rotation permutation.
+ probability: the probability of performing this augmentation.
seed: random seed
preprocess_vars_cache: PreprocessorCache object that records previously
performed augmentations. Updated in-place. If this
@@ -833,7 +840,7 @@ def random_rotation90(image,
do_a_rot90_random = _get_or_create_preprocess_rand_vars(
generator_func, preprocessor_cache.PreprocessorCache.ROTATION90,
preprocess_vars_cache)
- do_a_rot90_random = tf.greater(do_a_rot90_random, 0.5)
+ do_a_rot90_random = tf.less(do_a_rot90_random, probability)
# flip image
image = tf.cond(do_a_rot90_random, lambda: _rot90_image(image),
@@ -856,7 +863,7 @@ def random_rotation90(image,
if keypoints is not None:
keypoints = tf.cond(
do_a_rot90_random,
- lambda: keypoint_ops.rot90(keypoints),
+ lambda: keypoint_ops.rot90(keypoints, keypoint_rot_permutation),
lambda: keypoints)
result.append(keypoints)
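
The preprocessor changes above replace the hard-coded 50% chance with a probability argument and flip the comparison to tf.less, so probability=0.0 never applies the augmentation and probability=1.0 always does. A small usage sketch based on the signature shown above (values are illustrative):

    import tensorflow.compat.v1 as tf
    from object_detection.core import preprocessor

    image = tf.zeros([64, 64, 3], dtype=tf.float32)
    boxes = tf.constant([[0.1, 0.1, 0.5, 0.5]], dtype=tf.float32)
    # Flip the image (and its boxes) horizontally 30% of the time.
    flipped_image, flipped_boxes = preprocessor.random_horizontal_flip(
        image, boxes=boxes, probability=0.3)
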
diff --git a/research/object_detection/core/preprocessor_test.py b/research/object_detection/core/preprocessor_test.py
index a535ce207aae27afafc33308704f3c28c22b5619..5ebfe9eefe1a172e02f747ca81918612efe4792c 100644
--- a/research/object_detection/core/preprocessor_test.py
+++ b/research/object_detection/core/preprocessor_test.py
@@ -19,6 +19,7 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+import unittest
from absl.testing import parameterized
import numpy as np
import six
@@ -30,11 +31,12 @@ from object_detection.core import preprocessor
from object_detection.core import preprocessor_cache
from object_detection.core import standard_fields as fields
from object_detection.utils import test_case
+from object_detection.utils import tf_version
if six.PY2:
import mock # pylint: disable=g-import-not-at-top
else:
- from unittest import mock # pylint: disable=g-import-not-at-top
+ mock = unittest.mock # pylint: disable=g-import-not-at-top
class PreprocessorTest(test_case.TestCase, parameterized.TestCase):
@@ -118,7 +120,10 @@ class PreprocessorTest(test_case.TestCase, parameterized.TestCase):
return tf.constant(keypoints, dtype=tf.float32)
def createKeypointFlipPermutation(self):
- return np.array([0, 2, 1], dtype=np.int32)
+ return [0, 2, 1]
+
+ def createKeypointRotPermutation(self):
+ return [0, 2, 1]
def createTestLabels(self):
labels = tf.constant([1, 2], dtype=tf.int32)
@@ -910,19 +915,22 @@ class PreprocessorTest(test_case.TestCase, parameterized.TestCase):
test_keypoints=True)
def testRunRandomRotation90WithMaskAndKeypoints(self):
- preprocess_options = [(preprocessor.random_rotation90, {})]
image_height = 3
image_width = 3
images = tf.random_uniform([1, image_height, image_width, 3])
boxes = self.createTestBoxes()
masks = self.createTestMasks()
keypoints, _ = self.createTestKeypoints()
+ keypoint_rot_permutation = self.createKeypointRotPermutation()
tensor_dict = {
fields.InputDataFields.image: images,
fields.InputDataFields.groundtruth_boxes: boxes,
fields.InputDataFields.groundtruth_instance_masks: masks,
fields.InputDataFields.groundtruth_keypoints: keypoints
}
+ preprocess_options = [(preprocessor.random_rotation90, {
+ 'keypoint_rot_permutation': keypoint_rot_permutation
+ })]
preprocessor_arg_map = preprocessor.get_default_func_arg_map(
include_instance_masks=True, include_keypoints=True)
tensor_dict = preprocessor.preprocess(
@@ -2819,6 +2827,7 @@ class PreprocessorTest(test_case.TestCase, parameterized.TestCase):
self.assertAllEqual(images_shape, patched_images_shape)
self.assertAllEqual(images, patched_images)
+ @unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
def testAutoAugmentImage(self):
def graph_fn():
preprocessing_options = []
diff --git a/research/object_detection/core/standard_fields.py b/research/object_detection/core/standard_fields.py
index df995b4a429ec4e587d83cf8a94fb8c223ad4dca..fcfb97ae875e9cf97c195a8a998543879f202c57 100644
--- a/research/object_detection/core/standard_fields.py
+++ b/research/object_detection/core/standard_fields.py
@@ -66,6 +66,11 @@ class InputDataFields(object):
groundtruth_keypoint_weights: groundtruth weight factor for keypoints.
groundtruth_label_weights: groundtruth label weights.
groundtruth_weights: groundtruth weight factor for bounding boxes.
+ groundtruth_dp_num_points: The number of DensePose sampled points for each
+ instance.
+ groundtruth_dp_part_ids: Part indices for DensePose points.
+ groundtruth_dp_surface_coords: Image locations and UV coordinates for
+ DensePose points.
num_groundtruth_boxes: number of groundtruth boxes.
is_annotated: whether an image has been labeled or not.
true_image_shapes: true shapes of images in the resized images, as resized
@@ -108,6 +113,9 @@ class InputDataFields(object):
groundtruth_keypoint_weights = 'groundtruth_keypoint_weights'
groundtruth_label_weights = 'groundtruth_label_weights'
groundtruth_weights = 'groundtruth_weights'
+ groundtruth_dp_num_points = 'groundtruth_dp_num_points'
+ groundtruth_dp_part_ids = 'groundtruth_dp_part_ids'
+ groundtruth_dp_surface_coords = 'groundtruth_dp_surface_coords'
num_groundtruth_boxes = 'num_groundtruth_boxes'
is_annotated = 'is_annotated'
true_image_shape = 'true_image_shape'
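
The three new InputDataFields entries above are plain string keys, so input pipelines can attach DensePose groundtruth to the usual tensor_dict. A sketch of how such a dict might look; the shapes here are assumptions for illustration only (the DensePose decoder defines the authoritative layout):

    import numpy as np
    from object_detection.core import standard_fields as fields

    tensor_dict = {
        # Number of sampled DensePose points for the (here, single) instance.
        fields.InputDataFields.groundtruth_dp_num_points:
            np.array([3], np.int32),
        # Body-part id for each sampled point.
        fields.InputDataFields.groundtruth_dp_part_ids:
            np.zeros((1, 3), np.int32),
        # Assumed per-point layout: image location plus UV coordinates.
        fields.InputDataFields.groundtruth_dp_surface_coords:
            np.zeros((1, 3, 4), np.float32),
    }
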
diff --git a/research/object_detection/core/target_assigner.py b/research/object_detection/core/target_assigner.py
index 3d5453bf25007666340ba131ebb08b847ca8ba55..fd9020ebeac12c2610449afcbdd1f29dd3237f85 100644
--- a/research/object_detection/core/target_assigner.py
+++ b/research/object_detection/core/target_assigner.py
@@ -50,10 +50,12 @@ from object_detection.core import matcher as mat
from object_detection.core import region_similarity_calculator as sim_calc
from object_detection.core import standard_fields as fields
from object_detection.matchers import argmax_matcher
-from object_detection.matchers import bipartite_matcher
from object_detection.utils import shape_utils
from object_detection.utils import target_assigner_utils as ta_utils
+from object_detection.utils import tf_version
+if tf_version.is_tf1():
+ from object_detection.matchers import bipartite_matcher # pylint: disable=g-import-not-at-top
ResizeMethod = tf2.image.ResizeMethod
@@ -398,6 +400,8 @@ def create_target_assigner(reference, stage=None,
ValueError: if combination reference+stage is invalid.
"""
if reference == 'Multibox' and stage == 'proposal':
+ if tf_version.is_tf2():
+ raise ValueError('GreedyBipartiteMatcher is not supported in TF 2.X.')
similarity_calc = sim_calc.NegSqDistSimilarity()
matcher = bipartite_matcher.GreedyBipartiteMatcher()
box_coder_instance = mean_stddev_box_coder.MeanStddevBoxCoder()
@@ -713,3 +717,943 @@ def batch_assign_confidences(target_assigner,
batch_reg_weights, batch_match)
+def _smallest_positive_root(a, b, c):
+ """Returns the smallest positive root of a quadratic equation."""
+
+ discriminant = tf.sqrt(b ** 2 - 4 * a * c)
+
+ # TODO(vighneshb) We are currently using the slightly incorrect
+ # CenterNet implementation. The commented lines implement the fixed version
+ # in https://github.com/princeton-vl/CornerNet. Change the implementation
+ # after verifying it has no negative impact.
+ # root1 = (-b - discriminant) / (2 * a)
+ # root2 = (-b + discriminant) / (2 * a)
+
+ # return tf.where(tf.less(root1, 0), root2, root1)
+
+ return (-b + discriminant) / (2.0)
+
+
+def max_distance_for_overlap(height, width, min_iou):
+ """Computes how far apart bbox corners can lie while maintaining the iou.
+
+ Given a bounding box size, this function returns a lower bound on how far
+ apart the corners of another box can lie while still maintaining the given
+ IoU. The implementation is based on the `gaussian_radius` function in the
+ Objects as Points github repo: https://github.com/xingyizhou/CenterNet
+
+ Args:
+ height: A 1-D float Tensor representing height of the ground truth boxes.
+ width: A 1-D float Tensor representing width of the ground truth boxes.
+ min_iou: A float representing the minimum IoU desired.
+
+ Returns:
+ distance: A 1-D Tensor of distances, of the same length as the input
+ height and width tensors.
+ """
+
+ # Given that the detected box is displaced at a distance `d`, the exact
+ # IoU value will depend on the angle at which each corner is displaced.
+ # We simplify our computation by assuming that each corner is displaced by
+ # a distance `d` in both x and y direction. This gives us a lower IoU than
+ # what is actually realizable and ensures that any box with corners less
+ # than `d` distance apart will always have an IoU greater than or equal
+ # to `min_iou`
+
+  # The following 3 cases can be worked out geometrically and come down to
+ # solving a quadratic inequality. In each case, to ensure `min_iou` we use
+ # the smallest positive root of the equation.
+
+ # Case where detected box is offset from ground truth and no box completely
+ # contains the other.
+
+ distance_detection_offset = _smallest_positive_root(
+ a=1, b=-(height + width),
+ c=width * height * ((1 - min_iou) / (1 + min_iou))
+ )
+
+ # Case where detection is smaller than ground truth and completely contained
+ # in it.
+ distance_detection_in_gt = _smallest_positive_root(
+ a=4, b=-2 * (height + width),
+ c=(1 - min_iou) * width * height
+ )
+
+ # Case where ground truth is smaller than detection and completely contained
+ # in it.
+ distance_gt_in_detection = _smallest_positive_root(
+ a=4 * min_iou, b=(2 * min_iou) * (width + height),
+ c=(min_iou - 1) * width * height
+ )
+
+ return tf.reduce_min([distance_detection_offset,
+ distance_gt_in_detection,
+ distance_detection_in_gt], axis=0)
+
+
+def get_batch_predictions_from_indices(batch_predictions, indices):
+ """Gets the values of predictions in a batch at the given indices.
+
+ The indices are expected to come from the offset targets generation functions
+ in this library. The returned value is intended to be used inside a loss
+ function.
+
+ Args:
+ batch_predictions: A tensor of shape [batch_size, height, width, 2] for
+ single class offsets and [batch_size, height, width, class, 2] for
+ multiple classes offsets (e.g. keypoint joint offsets) representing the
+ (height, width) or (y_offset, x_offset) predictions over a batch.
+ indices: A tensor of shape [num_instances, 3] for single class offset and
+ [num_instances, 4] for multiple classes offsets representing the indices
+      in the batch to be penalized in a loss function.
+
+ Returns:
+ values: A tensor of shape [num_instances, 2] holding the predicted values
+ at the given indices.
+ """
+ return tf.gather_nd(batch_predictions, indices)
+
+
+def _compute_std_dev_from_box_size(boxes_height, boxes_width, min_overlap):
+ """Computes the standard deviation of the Gaussian kernel from box size.
+
+ Args:
+ boxes_height: A 1D tensor with shape [num_instances] representing the height
+ of each box.
+ boxes_width: A 1D tensor with shape [num_instances] representing the width
+ of each box.
+ min_overlap: The minimum IOU overlap that boxes need to have to not be
+ penalized.
+
+ Returns:
+ A 1D tensor with shape [num_instances] representing the computed Gaussian
+ sigma for each of the box.
+ """
+ # We are dividing by 3 so that points closer than the computed
+ # distance have a >99% CDF.
+ sigma = max_distance_for_overlap(boxes_height, boxes_width, min_overlap)
+ sigma = (2 * tf.math.maximum(tf.math.floor(sigma), 0.0) + 1) / 6.0
+ return sigma
+
+
+class CenterNetCenterHeatmapTargetAssigner(object):
+ """Wrapper to compute the object center heatmap."""
+
+ def __init__(self, stride, min_overlap=0.7):
+ """Initializes the target assigner.
+
+ Args:
+ stride: int, the stride of the network in output pixels.
+ min_overlap: The minimum IOU overlap that boxes need to have to not be
+ penalized.
+ """
+
+ self._stride = stride
+ self._min_overlap = min_overlap
+
+ def assign_center_targets_from_boxes(self,
+ height,
+ width,
+ gt_boxes_list,
+ gt_classes_list,
+ gt_weights_list=None):
+ """Computes the object center heatmap target.
+
+ Args:
+ height: int, height of input to the model. This is used to
+ determine the height of the output.
+ width: int, width of the input to the model. This is used to
+ determine the width of the output.
+ gt_boxes_list: A list of float tensors with shape [num_boxes, 4]
+ representing the groundtruth detection bounding boxes for each sample in
+ the batch. The box coordinates are expected in normalized coordinates.
+ gt_classes_list: A list of float tensors with shape [num_boxes,
+ num_classes] representing the one-hot encoded class labels for each box
+ in the gt_boxes_list.
+ gt_weights_list: A list of float tensors with shape [num_boxes]
+ representing the weight of each groundtruth detection box.
+
+ Returns:
+ heatmap: A Tensor of size [batch_size, output_height, output_width,
+ num_classes] representing the per class center heatmap. output_height
+ and output_width are computed by dividing the input height and width by
+ the stride specified during initialization.
+ """
+
+ out_height = tf.cast(height // self._stride, tf.float32)
+ out_width = tf.cast(width // self._stride, tf.float32)
+ # Compute the yx-grid to be used to generate the heatmap. Each returned
+ # tensor has shape of [out_height, out_width]
+ (y_grid, x_grid) = ta_utils.image_shape_to_grids(out_height, out_width)
+
+ heatmaps = []
+ if gt_weights_list is None:
+ gt_weights_list = [None] * len(gt_boxes_list)
+ # TODO(vighneshb) Replace the for loop with a batch version.
+ for boxes, class_targets, weights in zip(gt_boxes_list, gt_classes_list,
+ gt_weights_list):
+ boxes = box_list.BoxList(boxes)
+ # Convert the box coordinates to absolute output image dimension space.
+ boxes = box_list_ops.to_absolute_coordinates(boxes,
+ height // self._stride,
+ width // self._stride)
+      # Get the box center coordinates. Each returned tensor has the shape of
+ # [num_instances]
+ (y_center, x_center, boxes_height,
+ boxes_width) = boxes.get_center_coordinates_and_sizes()
+
+ # Compute the sigma from box size. The tensor shape: [num_instances].
+ sigma = _compute_std_dev_from_box_size(boxes_height, boxes_width,
+ self._min_overlap)
+ # Apply the Gaussian kernel to the center coordinates. Returned heatmap
+ # has shape of [out_height, out_width, num_classes]
+ heatmap = ta_utils.coordinates_to_heatmap(
+ y_grid=y_grid,
+ x_grid=x_grid,
+ y_coordinates=y_center,
+ x_coordinates=x_center,
+ sigma=sigma,
+ channel_onehot=class_targets,
+ channel_weights=weights)
+ heatmaps.append(heatmap)
+
+ # Return the stacked heatmaps over the batch.
+ return tf.stack(heatmaps, axis=0)
+
+
+class CenterNetBoxTargetAssigner(object):
+ """Wrapper to compute target tensors for the object detection task.
+
+ This class has methods that take as input a batch of ground truth tensors
+ (in the form of a list) and return the targets required to train the object
+ detection task.
+ """
+
+ def __init__(self, stride):
+ """Initializes the target assigner.
+
+ Args:
+ stride: int, the stride of the network in output pixels.
+ """
+
+ self._stride = stride
+
+ def assign_size_and_offset_targets(self,
+ height,
+ width,
+ gt_boxes_list,
+ gt_weights_list=None):
+ """Returns the box height/width and center offset targets and their indices.
+
+ The returned values are expected to be used with predicted tensors
+ of size (batch_size, height//self._stride, width//self._stride, 2). The
+ predicted values at the relevant indices can be retrieved with the
+ get_batch_predictions_from_indices function.
+
+ Args:
+ height: int, height of input to the model. This is used to determine the
+ height of the output.
+ width: int, width of the input to the model. This is used to determine the
+ width of the output.
+ gt_boxes_list: A list of float tensors with shape [num_boxes, 4]
+ representing the groundtruth detection bounding boxes for each sample in
+ the batch. The coordinates are expected in normalized coordinates.
+ gt_weights_list: A list of tensors with shape [num_boxes] corresponding to
+ the weight of each groundtruth detection box.
+
+ Returns:
+ batch_indices: an integer tensor of shape [num_boxes, 3] holding the
+ indices inside the predicted tensor which should be penalized. The
+ first column indicates the index along the batch dimension and the
+ second and third columns indicate the index along the y and x
+ dimensions respectively.
+ batch_box_height_width: a float tensor of shape [num_boxes, 2] holding
+ expected height and width of each box in the output space.
+ batch_offsets: a float tensor of shape [num_boxes, 2] holding the
+ expected y and x offset of each box in the output space.
+ batch_weights: a float tensor of shape [num_boxes] indicating the
+ weight of each prediction.
+ """
+
+ if gt_weights_list is None:
+ gt_weights_list = [None] * len(gt_boxes_list)
+
+ batch_indices = []
+ batch_box_height_width = []
+ batch_weights = []
+ batch_offsets = []
+
+ for i, (boxes, weights) in enumerate(zip(gt_boxes_list, gt_weights_list)):
+ boxes = box_list.BoxList(boxes)
+ boxes = box_list_ops.to_absolute_coordinates(boxes,
+ height // self._stride,
+ width // self._stride)
+      # Get the box center coordinates. Each returned tensor has the shape of
+ # [num_boxes]
+ (y_center, x_center, boxes_height,
+ boxes_width) = boxes.get_center_coordinates_and_sizes()
+ num_boxes = tf.shape(x_center)
+
+ # Compute the offsets and indices of the box centers. Shape:
+ # offsets: [num_boxes, 2]
+ # indices: [num_boxes, 2]
+ (offsets, indices) = ta_utils.compute_floor_offsets_with_indices(
+ y_source=y_center, x_source=x_center)
+
+ # Assign ones if weights are not provided.
+ if weights is None:
+ weights = tf.ones(num_boxes, dtype=tf.float32)
+
+ # Shape of [num_boxes, 1] integer tensor filled with current batch index.
+ batch_index = i * tf.ones_like(indices[:, 0:1], dtype=tf.int32)
+ batch_indices.append(tf.concat([batch_index, indices], axis=1))
+ batch_box_height_width.append(
+ tf.stack([boxes_height, boxes_width], axis=1))
+ batch_weights.append(weights)
+ batch_offsets.append(offsets)
+
+ batch_indices = tf.concat(batch_indices, axis=0)
+ batch_box_height_width = tf.concat(batch_box_height_width, axis=0)
+ batch_weights = tf.concat(batch_weights, axis=0)
+ batch_offsets = tf.concat(batch_offsets, axis=0)
+ return (batch_indices, batch_box_height_width, batch_offsets, batch_weights)
+
+
+# TODO(yuhuic): Update this class to handle the instance/keypoint weights.
+# Currently those weights are used as "mask" to indicate whether an
+# instance/keypoint should be considered or not (expecting only either 0 or 1
+# value). In reality, the weights can be any value and this class should handle
+# those values properly.
+class CenterNetKeypointTargetAssigner(object):
+ """Wrapper to compute target tensors for the CenterNet keypoint estimation.
+
+ This class has methods that take as input a batch of groundtruth tensors
+ (in the form of a list) and returns the targets required to train the
+ CenterNet model for keypoint estimation. Specifically, the class methods
+ expect the groundtruth in the following formats (consistent with the
+ standard Object Detection API). Note that usually the groundtruth tensors are
+  packed in a list that represents the batch dimension:
+
+ gt_classes_list: [Required] a list of 2D tf.float32 one-hot
+ (or k-hot) tensors of shape [num_instances, num_classes] containing the
+ class targets with the 0th index assumed to map to the first non-background
+ class.
+ gt_keypoints_list: [Required] a list of 3D tf.float32 tensors of
+ shape [num_instances, num_total_keypoints, 2] containing keypoint
+ coordinates. Note that the "num_total_keypoints" should be the sum of the
+ num_keypoints over all possible keypoint types, e.g. human pose, face.
+ For example, if a dataset contains both 17 human pose keypoints and 5 face
+ keypoints, then num_total_keypoints = 17 + 5 = 22.
+    If an instance contains only a subset of keypoints (e.g. human pose
+    keypoints but not face keypoints), the face keypoints will be filled with
+    zeros.
+ Also note that keypoints are assumed to be provided in normalized
+ coordinates and missing keypoints should be encoded as NaN.
+  gt_keypoints_weights_list: [Optional] a list of 2D tf.float32 tensors of
+    shape [num_instances, num_total_keypoints] representing the weight of each
+    keypoint. If not provided, then all non-NaN keypoints will be equally
+    weighted.
+ gt_boxes_list: [Optional] a list of 2D tf.float32 tensors of shape
+ [num_instances, 4] containing coordinates of the groundtruth boxes.
+ Groundtruth boxes are provided in [y_min, x_min, y_max, x_max] format and
+ assumed to be normalized and clipped relative to the image window with
+ y_min <= y_max and x_min <= x_max.
+ Note that the boxes are only used to compute the center targets but are not
+ considered as required output of the keypoint task. If the boxes were not
+ provided, the center targets will be inferred from the keypoints
+ [not implemented yet].
+ gt_weights_list: [Optional] A list of 1D tf.float32 tensors of shape
+ [num_instances] containing weights for groundtruth boxes. Only useful when
+ gt_boxes_list is also provided.
+ """
+
+ def __init__(self,
+ stride,
+ class_id,
+ keypoint_indices,
+ keypoint_std_dev=None,
+ per_keypoint_offset=False,
+ peak_radius=0):
+ """Initializes a CenterNet keypoints target assigner.
+
+ Args:
+ stride: int, the stride of the network in output pixels.
+ class_id: int, the ID of the class (0-indexed) that contains the target
+ keypoints to consider in this task. For example, if the task is human
+ pose estimation, the class id should correspond to the "human" class.
+ keypoint_indices: A list of integers representing the indices of the
+ keypoints to be considered in this task. This is used to retrieve the
+ subset of the keypoints from gt_keypoints that should be considered in
+ this task.
+      keypoint_std_dev: A list of floats representing the standard deviation of
+        the Gaussian kernel used to generate the keypoint heatmap (in units of
+        output pixels). This provides the flexibility of using a different
+        Gaussian kernel size for each keypoint type. If not provided, all
+        standard deviations will be the same as the default value (10.0 in the
+        output pixel space). If provided, the length of keypoint_std_dev needs
+        to match the length of keypoint_indices, indicating the standard
+        deviation of each keypoint type.
+ per_keypoint_offset: boolean, indicating whether to assign offset for
+ each keypoint channel. If set False, the output offset target will have
+ the shape [batch_size, out_height, out_width, 2]. If set True, the
+ output offset target will have the shape [batch_size, out_height,
+ out_width, 2 * num_keypoints].
+ peak_radius: int, the radius (in the unit of output pixel) around heatmap
+ peak to assign the offset targets.
+ """
+
+ self._stride = stride
+ self._class_id = class_id
+ self._keypoint_indices = keypoint_indices
+ self._per_keypoint_offset = per_keypoint_offset
+ self._peak_radius = peak_radius
+ if keypoint_std_dev is None:
+ self._keypoint_std_dev = ([_DEFAULT_KEYPOINT_OFFSET_STD_DEV] *
+ len(keypoint_indices))
+ else:
+ assert len(keypoint_indices) == len(keypoint_std_dev)
+ self._keypoint_std_dev = keypoint_std_dev
+
+ def _preprocess_keypoints_and_weights(self, out_height, out_width, keypoints,
+ class_onehot, class_weights,
+ keypoint_weights):
+ """Preprocesses the keypoints and the corresponding keypoint weights.
+
+ This function performs several common steps to preprocess the keypoints and
+ keypoint weights features, including:
+ 1) Select the subset of keypoints based on the keypoint indices, fill the
+       keypoint NaN values with zeros and convert to absolute coordinates.
+ 2) Generate the weights of the keypoint using the following information:
+ a. The class of the instance.
+ b. The NaN value of the keypoint coordinates.
+ c. The provided keypoint weights.
+
+ Args:
+      out_height: An integer or an integer tensor indicating the output height
+        of the model.
+      out_width: An integer or an integer tensor indicating the output width of
+        the model.
+ keypoints: A float tensor of shape [num_instances, num_total_keypoints, 2]
+        representing the original keypoint groundtruth coordinates.
+ class_onehot: A float tensor of shape [num_instances, num_classes]
+ containing the class targets with the 0th index assumed to map to the
+ first non-background class.
+ class_weights: A float tensor of shape [num_instances] containing weights
+ for groundtruth instances.
+ keypoint_weights: A float tensor of shape
+        [num_instances, num_total_keypoints] representing the weight of each
+        keypoint.
+
+ Returns:
+ A tuple of two tensors:
+ keypoint_absolute: A float tensor of shape
+ [num_instances, num_keypoints, 2] which is the selected and updated
+ keypoint coordinates.
+ keypoint_weights: A float tensor of shape [num_instances, num_keypoints]
+ representing the updated weight of each keypoint.
+ """
+    # Select the target keypoints by their type ids and generate the mask
+ # of valid elements.
+ valid_mask, keypoints = ta_utils.get_valid_keypoint_mask_for_class(
+ keypoint_coordinates=keypoints,
+ class_id=self._class_id,
+ class_onehot=class_onehot,
+ class_weights=class_weights,
+ keypoint_indices=self._keypoint_indices)
+ # Keypoint coordinates in absolute coordinate system.
+ # The shape of the tensors: [num_instances, num_keypoints, 2].
+ keypoints_absolute = keypoint_ops.to_absolute_coordinates(
+ keypoints, out_height, out_width)
+ # Assign default weights for the keypoints.
+ if keypoint_weights is None:
+ keypoint_weights = tf.ones_like(keypoints[:, :, 0])
+ else:
+ keypoint_weights = tf.gather(
+ keypoint_weights, indices=self._keypoint_indices, axis=1)
+ keypoint_weights = keypoint_weights * valid_mask
+ return keypoints_absolute, keypoint_weights
+
+ def assign_keypoint_heatmap_targets(self,
+ height,
+ width,
+ gt_keypoints_list,
+ gt_classes_list,
+ gt_keypoints_weights_list=None,
+ gt_weights_list=None,
+ gt_boxes_list=None):
+ """Returns the keypoint heatmap targets for the CenterNet model.
+
+ Args:
+ height: int, height of input to the CenterNet model. This is used to
+ determine the height of the output.
+ width: int, width of the input to the CenterNet model. This is used to
+ determine the width of the output.
+ gt_keypoints_list: A list of float tensors with shape [num_instances,
+ num_total_keypoints, 2]. See class-level description for more detail.
+ gt_classes_list: A list of float tensors with shape [num_instances,
+ num_classes]. See class-level description for more detail.
+ gt_keypoints_weights_list: A list of tensors with shape [num_instances,
+ num_total_keypoints] corresponding to the weight of each keypoint.
+ gt_weights_list: A list of float tensors with shape [num_instances]. See
+ class-level description for more detail.
+ gt_boxes_list: A list of float tensors with shape [num_instances, 4]. See
+ class-level description for more detail. If provided, the keypoint
+ standard deviations will be scaled based on the box sizes.
+
+ Returns:
+ heatmap: A float tensor of shape [batch_size, output_height, output_width,
+ num_keypoints] representing the per keypoint type center heatmap.
+ output_height and output_width are computed by dividing the input height
+ and width by the stride specified during initialization. Note that the
+ "num_keypoints" is defined by the length of keypoint_indices, which is
+ not necessarily equal to "num_total_keypoints".
+ num_instances_batch: A 2D int tensor of shape
+        [batch_size, num_keypoints] representing the number of instances for each
+ keypoint type.
+ valid_mask: A float tensor with shape [batch_size, output_height,
+ output_width] where all values within the regions of the blackout boxes
+        are 0.0 and 1.0 elsewhere.
+ """
+ out_width = tf.cast(width // self._stride, tf.float32)
+ out_height = tf.cast(height // self._stride, tf.float32)
+ # Compute the yx-grid to be used to generate the heatmap. Each returned
+ # tensor has shape of [out_height, out_width]
+ y_grid, x_grid = ta_utils.image_shape_to_grids(out_height, out_width)
+
+ if gt_keypoints_weights_list is None:
+ gt_keypoints_weights_list = [None] * len(gt_keypoints_list)
+ if gt_weights_list is None:
+ gt_weights_list = [None] * len(gt_classes_list)
+ if gt_boxes_list is None:
+ gt_boxes_list = [None] * len(gt_keypoints_list)
+
+ heatmaps = []
+ num_instances_list = []
+ valid_mask_list = []
+ for keypoints, classes, kp_weights, weights, boxes in zip(
+ gt_keypoints_list, gt_classes_list, gt_keypoints_weights_list,
+ gt_weights_list, gt_boxes_list):
+ keypoints_absolute, kp_weights = self._preprocess_keypoints_and_weights(
+ out_height=out_height,
+ out_width=out_width,
+ keypoints=keypoints,
+ class_onehot=classes,
+ class_weights=weights,
+ keypoint_weights=kp_weights)
+ num_instances, num_keypoints, _ = (
+ shape_utils.combined_static_and_dynamic_shape(keypoints_absolute))
+
+ # A tensor of shape [num_instances, num_keypoints] with
+ # each element representing the type dimension for each corresponding
+ # keypoint:
+ # [[0, 1, ..., k-1],
+ # [0, 1, ..., k-1],
+ # :
+ # [0, 1, ..., k-1]]
+ keypoint_types = tf.tile(
+ input=tf.expand_dims(tf.range(num_keypoints), axis=0),
+ multiples=[num_instances, 1])
+
+ # A tensor of shape [num_instances, num_keypoints] with
+ # each element representing the sigma of the Gaussian kernel for each
+ # keypoint.
+ keypoint_std_dev = tf.tile(
+ input=tf.expand_dims(tf.constant(self._keypoint_std_dev), axis=0),
+ multiples=[num_instances, 1])
+
+ # If boxes is not None, then scale the standard deviation based on the
+ # size of the object bounding boxes similar to object center heatmap.
+ if boxes is not None:
+ boxes = box_list.BoxList(boxes)
+ # Convert the box coordinates to absolute output image dimension space.
+ boxes = box_list_ops.to_absolute_coordinates(boxes,
+ height // self._stride,
+ width // self._stride)
+        # Get the box height and width. Each returned tensor has the shape
+ # of [num_instances]
+ (_, _, boxes_height,
+ boxes_width) = boxes.get_center_coordinates_and_sizes()
+
+ # Compute the sigma from box size. The tensor shape: [num_instances].
+ sigma = _compute_std_dev_from_box_size(boxes_height, boxes_width, 0.7)
+ keypoint_std_dev = keypoint_std_dev * tf.stack(
+ [sigma] * num_keypoints, axis=1)
+
+ # Generate the valid region mask to ignore regions with target class but
+ # no corresponding keypoints.
+ # Shape: [num_instances].
+ blackout = tf.logical_and(classes[:, self._class_id] > 0,
+ tf.reduce_max(kp_weights, axis=1) < 1e-3)
+ valid_mask = ta_utils.blackout_pixel_weights_by_box_regions(
+ out_height, out_width, boxes.get(), blackout)
+ valid_mask_list.append(valid_mask)
+
+ # Apply the Gaussian kernel to the keypoint coordinates. Returned heatmap
+ # has shape of [out_height, out_width, num_keypoints].
+ heatmap = ta_utils.coordinates_to_heatmap(
+ y_grid=y_grid,
+ x_grid=x_grid,
+ y_coordinates=tf.keras.backend.flatten(keypoints_absolute[:, :, 0]),
+ x_coordinates=tf.keras.backend.flatten(keypoints_absolute[:, :, 1]),
+ sigma=tf.keras.backend.flatten(keypoint_std_dev),
+ channel_onehot=tf.one_hot(
+ tf.keras.backend.flatten(keypoint_types), depth=num_keypoints),
+ channel_weights=tf.keras.backend.flatten(kp_weights))
+ num_instances_list.append(
+ tf.cast(tf.reduce_sum(kp_weights, axis=0), dtype=tf.int32))
+ heatmaps.append(heatmap)
+ return (tf.stack(heatmaps, axis=0), tf.stack(num_instances_list, axis=0),
+ tf.stack(valid_mask_list, axis=0))
+
+ def _get_keypoint_types(self, num_instances, num_keypoints, num_neighbors):
+ """Gets keypoint type index tensor.
+
+ The function prepares the tensor of keypoint indices with shape
+ [num_instances, num_keypoints, num_neighbors]. Each element represents the
+ keypoint type index for each corresponding keypoint and tiled along the 3rd
+ axis:
+ [[0, 1, ..., num_keypoints - 1],
+ [0, 1, ..., num_keypoints - 1],
+ :
+ [0, 1, ..., num_keypoints - 1]]
+
+ Args:
+ num_instances: int, the number of instances, used to define the 1st
+ dimension.
+ num_keypoints: int, the number of keypoint types, used to define the 2nd
+ dimension.
+ num_neighbors: int, the number of neighborhood pixels to consider for each
+ keypoint, used to define the 3rd dimension.
+
+ Returns:
+      An integer tensor of shape [num_instances, num_keypoints, num_neighbors].
+ """
+ keypoint_types = tf.range(num_keypoints)[tf.newaxis, :, tf.newaxis]
+ tiled_keypoint_types = tf.tile(keypoint_types,
+ multiples=[num_instances, 1, num_neighbors])
+ return tiled_keypoint_types
+
+ def assign_keypoints_offset_targets(self,
+ height,
+ width,
+ gt_keypoints_list,
+ gt_classes_list,
+ gt_keypoints_weights_list=None,
+ gt_weights_list=None):
+ """Returns the offsets and indices of the keypoints for location refinement.
+
+    The returned values are used to refine the location of each keypoint in the
+ heatmap. The predicted values at the relevant indices can be retrieved with
+ the get_batch_predictions_from_indices function.
+
+ Args:
+ height: int, height of input to the CenterNet model. This is used to
+ determine the height of the output.
+ width: int, width of the input to the CenterNet model. This is used to
+ determine the width of the output.
+ gt_keypoints_list: A list of tensors with shape [num_instances,
+        num_total_keypoints, 2]. See class-level description for more detail.
+ gt_classes_list: A list of tensors with shape [num_instances,
+ num_classes]. See class-level description for more detail.
+ gt_keypoints_weights_list: A list of tensors with shape [num_instances,
+ num_total_keypoints] corresponding to the weight of each keypoint.
+ gt_weights_list: A list of float tensors with shape [num_instances]. See
+ class-level description for more detail.
+
+ Returns:
+ batch_indices: an integer tensor of shape [num_total_instances, 3] (or
+ [num_total_instances, 4] if 'per_keypoint_offset' is set True) holding
+ the indices inside the predicted tensor which should be penalized. The
+ first column indicates the index along the batch dimension and the
+ second and third columns indicate the index along the y and x
+ dimensions respectively. The fourth column corresponds to the channel
+ dimension (if 'per_keypoint_offset' is set True).
+ batch_offsets: a float tensor of shape [num_total_instances, 2] holding
+ the expected y and x offset of each box in the output space.
+ batch_weights: a float tensor of shape [num_total_instances] indicating
+ the weight of each prediction.
+ Note that num_total_instances = batch_size * num_instances *
+ num_keypoints * num_neighbors
+ """
+
+ batch_indices = []
+ batch_offsets = []
+ batch_weights = []
+
+ if gt_keypoints_weights_list is None:
+ gt_keypoints_weights_list = [None] * len(gt_keypoints_list)
+ if gt_weights_list is None:
+ gt_weights_list = [None] * len(gt_classes_list)
+ for i, (keypoints, classes, kp_weights, weights) in enumerate(
+ zip(gt_keypoints_list, gt_classes_list, gt_keypoints_weights_list,
+ gt_weights_list)):
+ keypoints_absolute, kp_weights = self._preprocess_keypoints_and_weights(
+ out_height=height // self._stride,
+ out_width=width // self._stride,
+ keypoints=keypoints,
+ class_onehot=classes,
+ class_weights=weights,
+ keypoint_weights=kp_weights)
+ num_instances, num_keypoints, _ = (
+ shape_utils.combined_static_and_dynamic_shape(keypoints_absolute))
+
+ # [num_instances * num_keypoints]
+ y_source = tf.keras.backend.flatten(keypoints_absolute[:, :, 0])
+ x_source = tf.keras.backend.flatten(keypoints_absolute[:, :, 1])
+
+ # All keypoint coordinates and their neighbors:
+ # [num_instance * num_keypoints, num_neighbors]
+ (y_source_neighbors, x_source_neighbors,
+ valid_sources) = ta_utils.get_surrounding_grids(height // self._stride,
+ width // self._stride,
+ y_source, x_source,
+ self._peak_radius)
+ _, num_neighbors = shape_utils.combined_static_and_dynamic_shape(
+ y_source_neighbors)
+
+ # Update the valid keypoint weights.
+ # [num_instance * num_keypoints, num_neighbors]
+ valid_keypoints = tf.cast(
+ valid_sources, dtype=tf.float32) * tf.stack(
+ [tf.keras.backend.flatten(kp_weights)] * num_neighbors, axis=-1)
+
+ # Compute the offsets and indices of the box centers. Shape:
+ # offsets: [num_instances * num_keypoints, num_neighbors, 2]
+ # indices: [num_instances * num_keypoints, num_neighbors, 2]
+ offsets, indices = ta_utils.compute_floor_offsets_with_indices(
+ y_source=y_source_neighbors,
+ x_source=x_source_neighbors,
+ y_target=y_source,
+ x_target=x_source)
+ # Reshape to:
+ # offsets: [num_instances * num_keypoints * num_neighbors, 2]
+ # indices: [num_instances * num_keypoints * num_neighbors, 2]
+ offsets = tf.reshape(offsets, [-1, 2])
+ indices = tf.reshape(indices, [-1, 2])
+
+ # Prepare the batch indices to be prepended.
+ batch_index = tf.fill(
+ [num_instances * num_keypoints * num_neighbors, 1], i)
+ if self._per_keypoint_offset:
+ tiled_keypoint_types = self._get_keypoint_types(
+ num_instances, num_keypoints, num_neighbors)
+ batch_indices.append(
+ tf.concat([batch_index, indices,
+ tf.reshape(tiled_keypoint_types, [-1, 1])], axis=1))
+ else:
+ batch_indices.append(tf.concat([batch_index, indices], axis=1))
+ batch_offsets.append(offsets)
+ batch_weights.append(tf.keras.backend.flatten(valid_keypoints))
+
+ # Concatenate the tensors in the batch in the first dimension:
+ # shape: [batch_size * num_instances * num_keypoints * num_neighbors, 3] or
+ # [batch_size * num_instances * num_keypoints * num_neighbors, 4] if
+ # 'per_keypoint_offset' is set to True.
+ batch_indices = tf.concat(batch_indices, axis=0)
+ # shape: [batch_size * num_instances * num_keypoints * num_neighbors]
+ batch_weights = tf.concat(batch_weights, axis=0)
+ # shape: [batch_size * num_instances * num_keypoints * num_neighbors, 2]
+ batch_offsets = tf.concat(batch_offsets, axis=0)
+ return (batch_indices, batch_offsets, batch_weights)
+
+ def assign_joint_regression_targets(self,
+ height,
+ width,
+ gt_keypoints_list,
+ gt_classes_list,
+ gt_boxes_list=None,
+ gt_keypoints_weights_list=None,
+ gt_weights_list=None):
+ """Returns the joint regression from center grid to keypoints.
+
+ The joint regression is used as the grouping cue from the estimated
+ keypoints to instance center. The offsets are the vectors from the floored
+ object center coordinates to the keypoint coordinates.
+
+ Args:
+ height: int, height of input to the CenterNet model. This is used to
+ determine the height of the output.
+ width: int, width of the input to the CenterNet model. This is used to
+ determine the width of the output.
+ gt_keypoints_list: A list of float tensors with shape [num_instances,
+        num_total_keypoints, 2]. See class-level description for more detail.
+ gt_classes_list: A list of float tensors with shape [num_instances,
+ num_classes]. See class-level description for more detail.
+ gt_boxes_list: A list of float tensors with shape [num_instances, 4]. See
+ class-level description for more detail. If provided, then the center
+ targets will be computed based on the center of the boxes.
+ gt_keypoints_weights_list: A list of float tensors with shape
+        [num_instances, num_total_keypoints] representing the weight of each
+ keypoint.
+ gt_weights_list: A list of float tensors with shape [num_instances]. See
+ class-level description for more detail.
+
+ Returns:
+ batch_indices: an integer tensor of shape [num_instances, 4] holding the
+ indices inside the predicted tensor which should be penalized. The
+ first column indicates the index along the batch dimension and the
+ second and third columns indicate the index along the y and x
+ dimensions respectively, the last dimension refers to the keypoint type
+ dimension.
+ batch_offsets: a float tensor of shape [num_instances, 2] holding the
+ expected y and x offset of each box in the output space.
+ batch_weights: a float tensor of shape [num_instances] indicating the
+ weight of each prediction.
+ Note that num_total_instances = batch_size * num_instances * num_keypoints
+
+ Raises:
+ NotImplementedError: currently the object center coordinates need to be
+ computed from groundtruth bounding boxes. The functionality of
+ generating the object center coordinates from keypoints is not
+ implemented yet.
+ """
+
+ batch_indices = []
+ batch_offsets = []
+ batch_weights = []
+ batch_size = len(gt_keypoints_list)
+ if gt_keypoints_weights_list is None:
+ gt_keypoints_weights_list = [None] * batch_size
+ if gt_boxes_list is None:
+ gt_boxes_list = [None] * batch_size
+ if gt_weights_list is None:
+ gt_weights_list = [None] * len(gt_classes_list)
+ for i, (keypoints, classes, boxes, kp_weights, weights) in enumerate(
+ zip(gt_keypoints_list, gt_classes_list,
+ gt_boxes_list, gt_keypoints_weights_list, gt_weights_list)):
+ keypoints_absolute, kp_weights = self._preprocess_keypoints_and_weights(
+ out_height=height // self._stride,
+ out_width=width // self._stride,
+ keypoints=keypoints,
+ class_onehot=classes,
+ class_weights=weights,
+ keypoint_weights=kp_weights)
+ num_instances, num_keypoints, _ = (
+ shape_utils.combined_static_and_dynamic_shape(keypoints_absolute))
+
+ # If boxes are provided, compute the joint center from it.
+ if boxes is not None:
+ # Compute joint center from boxes.
+ boxes = box_list.BoxList(boxes)
+ boxes = box_list_ops.to_absolute_coordinates(boxes,
+ height // self._stride,
+ width // self._stride)
+ y_center, x_center, _, _ = boxes.get_center_coordinates_and_sizes()
+ else:
+ # TODO(yuhuic): Add the logic to generate object centers from keypoints.
+ raise NotImplementedError((
+ 'The functionality of generating object centers from keypoints is'
+ ' not implemented yet. Please provide groundtruth bounding boxes.'
+ ))
+
+ # Tile the yx center coordinates to be the same shape as keypoints.
+ y_center_tiled = tf.tile(
+ tf.reshape(y_center, shape=[num_instances, 1]),
+ multiples=[1, num_keypoints])
+ x_center_tiled = tf.tile(
+ tf.reshape(x_center, shape=[num_instances, 1]),
+ multiples=[1, num_keypoints])
+ # [num_instance * num_keypoints, num_neighbors]
+ (y_source_neighbors, x_source_neighbors,
+ valid_sources) = ta_utils.get_surrounding_grids(
+ height // self._stride, width // self._stride,
+ tf.keras.backend.flatten(y_center_tiled),
+ tf.keras.backend.flatten(x_center_tiled), self._peak_radius)
+
+ _, num_neighbors = shape_utils.combined_static_and_dynamic_shape(
+ y_source_neighbors)
+ valid_keypoints = tf.cast(
+ valid_sources, dtype=tf.float32) * tf.stack(
+ [tf.keras.backend.flatten(kp_weights)] * num_neighbors, axis=-1)
+
+ # Compute the offsets and indices of the box centers. Shape:
+      # offsets: [num_instances * num_keypoints, num_neighbors, 2]
+      # indices: [num_instances * num_keypoints, num_neighbors, 2]
+ (offsets, indices) = ta_utils.compute_floor_offsets_with_indices(
+ y_source=y_source_neighbors,
+ x_source=x_source_neighbors,
+ y_target=tf.keras.backend.flatten(keypoints_absolute[:, :, 0]),
+ x_target=tf.keras.backend.flatten(keypoints_absolute[:, :, 1]))
+ # Reshape to:
+ # offsets: [num_instances * num_keypoints * num_neighbors, 2]
+ # indices: [num_instances * num_keypoints * num_neighbors, 2]
+ offsets = tf.reshape(offsets, [-1, 2])
+ indices = tf.reshape(indices, [-1, 2])
+
+ # keypoint type tensor: [num_instances, num_keypoints, num_neighbors].
+ tiled_keypoint_types = self._get_keypoint_types(
+ num_instances, num_keypoints, num_neighbors)
+
+ batch_index = tf.fill(
+ [num_instances * num_keypoints * num_neighbors, 1], i)
+ batch_indices.append(
+ tf.concat([batch_index, indices,
+ tf.reshape(tiled_keypoint_types, [-1, 1])], axis=1))
+ batch_offsets.append(offsets)
+ batch_weights.append(tf.keras.backend.flatten(valid_keypoints))
+
+ # Concatenate the tensors in the batch in the first dimension:
+ # shape: [batch_size * num_instances * num_keypoints, 4]
+ batch_indices = tf.concat(batch_indices, axis=0)
+ # shape: [batch_size * num_instances * num_keypoints]
+ batch_weights = tf.concat(batch_weights, axis=0)
+ # shape: [batch_size * num_instances * num_keypoints, 2]
+ batch_offsets = tf.concat(batch_offsets, axis=0)
+ return (batch_indices, batch_offsets, batch_weights)
+
+
+class CenterNetMaskTargetAssigner(object):
+ """Wrapper to compute targets for segmentation masks."""
+
+ def __init__(self, stride):
+ self._stride = stride
+
+ def assign_segmentation_targets(
+ self, gt_masks_list, gt_classes_list,
+ mask_resize_method=ResizeMethod.BILINEAR):
+ """Computes the segmentation targets.
+
+ This utility produces a semantic segmentation mask for each class, starting
+ with whole image instance segmentation masks. Effectively, each per-class
+ segmentation target is the union of all masks from that class.
+
+ Args:
+ gt_masks_list: A list of float tensors with shape [num_boxes,
+ input_height, input_width] with values in {0, 1} representing instance
+ masks for each object.
+ gt_classes_list: A list of float tensors with shape [num_boxes,
+ num_classes] representing the one-hot encoded class labels for each box
+ in the gt_boxes_list.
+ mask_resize_method: A `tf.compat.v2.image.ResizeMethod`. The method to use
+ when resizing masks from input resolution to output resolution.
+
+ Returns:
+ segmentation_targets: An int32 tensor of size [batch_size, output_height,
+ output_width, num_classes] representing the class of each location in
+ the output space.
+ """
+ # TODO(ronnyvotel): Handle groundtruth weights.
+ _, num_classes = shape_utils.combined_static_and_dynamic_shape(
+ gt_classes_list[0])
+
+ _, input_height, input_width = (
+ shape_utils.combined_static_and_dynamic_shape(gt_masks_list[0]))
+ output_height = input_height // self._stride
+ output_width = input_width // self._stride
+
+ segmentation_targets_list = []
+ for gt_masks, gt_classes in zip(gt_masks_list, gt_classes_list):
+ # Resize segmentation masks to conform to output dimensions. Use TF2
+ # image resize because TF1's version is buggy:
+ # https://yaqs.corp.google.com/eng/q/4970450458378240
+ gt_masks = tf2.image.resize(
+ gt_masks[:, :, :, tf.newaxis],
+ size=(output_height, output_width),
+ method=mask_resize_method)
+ gt_classes_reshaped = tf.reshape(gt_classes, [-1, 1, 1, num_classes])
+ # Shape: [h, w, num_classes].
+ segmentations_for_image = tf.reduce_max(
+ gt_masks * gt_classes_reshaped, axis=0)
+ segmentation_targets_list.append(segmentations_for_image)
+
+ segmentation_target = tf.stack(segmentation_targets_list, axis=0)
+ return segmentation_target
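
To make the geometry behind max_distance_for_overlap concrete: each of the three containment cases above reduces to a quadratic in the corner displacement d, and the assigner keeps the smallest of the three roots before turning it into a Gaussian sigma. A plain-NumPy check of those roots for a 10x10 box at min_iou=0.7, mirroring the patched helper (including the simplified division by 2 noted in its TODO):

    import numpy as np

    def smallest_positive_root(a, b, c):
      # Same simplification as the helper above: divide by 2, not 2 * a.
      return (-b + np.sqrt(b ** 2 - 4 * a * c)) / 2.0

    h, w, min_iou = 10.0, 10.0, 0.7
    d_offset = smallest_positive_root(
        1, -(h + w), w * h * (1 - min_iou) / (1 + min_iou))
    d_det_in_gt = smallest_positive_root(4, -2 * (h + w), (1 - min_iou) * w * h)
    d_gt_in_det = smallest_positive_root(
        4 * min_iou, 2 * min_iou * (w + h), (min_iou - 1) * w * h)
    radius = min(d_offset, d_det_in_gt, d_gt_in_det)
    sigma = (2 * max(np.floor(radius), 0.0) + 1) / 6.0  # as in the patch
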
diff --git a/research/object_detection/core/target_assigner_test.py b/research/object_detection/core/target_assigner_test.py
index fb0a63bd1342b8826da56654d647656f736964ae..5a0ca43e558beac27076bce02ae8f62d6624d6f7 100644
--- a/research/object_detection/core/target_assigner_test.py
+++ b/research/object_detection/core/target_assigner_test.py
@@ -24,9 +24,9 @@ from object_detection.core import region_similarity_calculator
from object_detection.core import standard_fields as fields
from object_detection.core import target_assigner as targetassigner
from object_detection.matchers import argmax_matcher
-from object_detection.matchers import bipartite_matcher
from object_detection.utils import np_box_ops
from object_detection.utils import test_case
+from object_detection.utils import tf_version
class TargetAssignerTest(test_case.TestCase):
@@ -439,7 +439,7 @@ class TargetAssignerTest(test_case.TestCase):
def test_raises_error_on_incompatible_groundtruth_boxes_and_labels(self):
similarity_calc = region_similarity_calculator.NegSqDistSimilarity()
- matcher = bipartite_matcher.GreedyBipartiteMatcher()
+ matcher = argmax_matcher.ArgMaxMatcher(0.5)
box_coder = mean_stddev_box_coder.MeanStddevBoxCoder()
unmatched_class_label = tf.constant([1, 0, 0, 0, 0, 0, 0], tf.float32)
target_assigner = targetassigner.TargetAssigner(
@@ -469,7 +469,7 @@ class TargetAssignerTest(test_case.TestCase):
def test_raises_error_on_invalid_groundtruth_labels(self):
similarity_calc = region_similarity_calculator.NegSqDistSimilarity()
- matcher = bipartite_matcher.GreedyBipartiteMatcher()
+ matcher = argmax_matcher.ArgMaxMatcher(0.5)
box_coder = mean_stddev_box_coder.MeanStddevBoxCoder(stddev=1.0)
unmatched_class_label = tf.constant([[0, 0], [0, 0], [0, 0]], tf.float32)
target_assigner = targetassigner.TargetAssigner(
@@ -1191,7 +1191,7 @@ class BatchTargetAssignConfidencesTest(test_case.TestCase):
])
-class CreateTargetAssignerTest(tf.test.TestCase):
+class CreateTargetAssignerTest(test_case.TestCase):
def test_create_target_assigner(self):
"""Tests that named constructor gives working target assigners.
@@ -1202,9 +1202,10 @@ class CreateTargetAssignerTest(tf.test.TestCase):
groundtruth = box_list.BoxList(tf.constant(corners))
priors = box_list.BoxList(tf.constant(corners))
- multibox_ta = (targetassigner
- .create_target_assigner('Multibox', stage='proposal'))
- multibox_ta.assign(priors, groundtruth)
+ if tf_version.is_tf1():
+ multibox_ta = (targetassigner
+ .create_target_assigner('Multibox', stage='proposal'))
+ multibox_ta.assign(priors, groundtruth)
# No tests on output, as that may vary arbitrarily as new target assigners
# are added. As long as it is constructed correctly and runs without errors,
# tests on the individual assigners cover correctness of the assignments.
@@ -1229,6 +1230,681 @@ class CreateTargetAssignerTest(tf.test.TestCase):
stage='invalid_stage')
+def _array_argmax(array):
+ return np.unravel_index(np.argmax(array), array.shape)
+
+
+class CenterNetCenterHeatmapTargetAssignerTest(test_case.TestCase):
+
+ def setUp(self):
+ super(CenterNetCenterHeatmapTargetAssignerTest, self).setUp()
+
+ self._box_center = [0.0, 0.0, 1.0, 1.0]
+ self._box_center_small = [0.25, 0.25, 0.75, 0.75]
+ self._box_lower_left = [0.5, 0.0, 1.0, 0.5]
+ self._box_center_offset = [0.1, 0.05, 1.0, 1.0]
+ self._box_odd_coordinates = [0.1625, 0.2125, 0.5625, 0.9625]
+
+ def test_center_location(self):
+ """Test that the centers are at the correct location."""
+ def graph_fn():
+ box_batch = [tf.constant([self._box_center, self._box_lower_left])]
+ classes = [
+ tf.one_hot([0, 1], depth=4),
+ ]
+ assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(4)
+ targets = assigner.assign_center_targets_from_boxes(80, 80, box_batch,
+ classes)
+ return targets
+ targets = self.execute(graph_fn, [])
+ self.assertEqual((10, 10), _array_argmax(targets[0, :, :, 0]))
+ self.assertAlmostEqual(1.0, targets[0, 10, 10, 0])
+ self.assertEqual((15, 5), _array_argmax(targets[0, :, :, 1]))
+ self.assertAlmostEqual(1.0, targets[0, 15, 5, 1])
+
+ def test_center_batch_shape(self):
+ """Test that the shape of the target for a batch is correct."""
+ def graph_fn():
+ box_batch = [
+ tf.constant([self._box_center, self._box_lower_left]),
+ tf.constant([self._box_center]),
+ tf.constant([self._box_center_small]),
+ ]
+ classes = [
+ tf.one_hot([0, 1], depth=4),
+ tf.one_hot([2], depth=4),
+ tf.one_hot([3], depth=4),
+ ]
+ assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(4)
+ targets = assigner.assign_center_targets_from_boxes(80, 80, box_batch,
+ classes)
+ return targets
+ targets = self.execute(graph_fn, [])
+ self.assertEqual((3, 20, 20, 4), targets.shape)
+
+ def test_center_overlap_maximum(self):
+    """Test that when boxes overlap, we are computing the maximum."""
+ def graph_fn():
+ box_batch = [
+ tf.constant([
+ self._box_center, self._box_center_offset, self._box_center,
+ self._box_center_offset
+ ])
+ ]
+ classes = [
+ tf.one_hot([0, 0, 1, 2], depth=4),
+ ]
+
+ assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(4)
+ targets = assigner.assign_center_targets_from_boxes(80, 80, box_batch,
+ classes)
+ return targets
+ targets = self.execute(graph_fn, [])
+ class0_targets = targets[0, :, :, 0]
+ class1_targets = targets[0, :, :, 1]
+ class2_targets = targets[0, :, :, 2]
+ np.testing.assert_allclose(class0_targets,
+ np.maximum(class1_targets, class2_targets))
+
+ def test_size_blur(self):
+ """Test that the heatmap of a larger box is more blurred."""
+ def graph_fn():
+ box_batch = [tf.constant([self._box_center, self._box_center_small])]
+
+ classes = [
+ tf.one_hot([0, 1], depth=4),
+ ]
+ assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(4)
+ targets = assigner.assign_center_targets_from_boxes(80, 80, box_batch,
+ classes)
+ return targets
+ targets = self.execute(graph_fn, [])
+ self.assertGreater(
+ np.count_nonzero(targets[:, :, :, 0]),
+ np.count_nonzero(targets[:, :, :, 1]))
+
+ def test_weights(self):
+ """Test that the weights correctly ignore ground truth."""
+ def graph1_fn():
+ box_batch = [
+ tf.constant([self._box_center, self._box_lower_left]),
+ tf.constant([self._box_center]),
+ tf.constant([self._box_center_small]),
+ ]
+ classes = [
+ tf.one_hot([0, 1], depth=4),
+ tf.one_hot([2], depth=4),
+ tf.one_hot([3], depth=4),
+ ]
+ assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(4)
+ targets = assigner.assign_center_targets_from_boxes(80, 80, box_batch,
+ classes)
+ return targets
+
+ targets = self.execute(graph1_fn, [])
+ self.assertAlmostEqual(1.0, targets[0, :, :, 0].max())
+ self.assertAlmostEqual(1.0, targets[0, :, :, 1].max())
+ self.assertAlmostEqual(1.0, targets[1, :, :, 2].max())
+ self.assertAlmostEqual(1.0, targets[2, :, :, 3].max())
+ self.assertAlmostEqual(0.0, targets[0, :, :, [2, 3]].max())
+ self.assertAlmostEqual(0.0, targets[1, :, :, [0, 1, 3]].max())
+ self.assertAlmostEqual(0.0, targets[2, :, :, :3].max())
+
+ def graph2_fn():
+ weights = [
+ tf.constant([0., 1.]),
+ tf.constant([1.]),
+ tf.constant([1.]),
+ ]
+ box_batch = [
+ tf.constant([self._box_center, self._box_lower_left]),
+ tf.constant([self._box_center]),
+ tf.constant([self._box_center_small]),
+ ]
+ classes = [
+ tf.one_hot([0, 1], depth=4),
+ tf.one_hot([2], depth=4),
+ tf.one_hot([3], depth=4),
+ ]
+ assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(4)
+ targets = assigner.assign_center_targets_from_boxes(80, 80, box_batch,
+ classes,
+ weights)
+ return targets
+ targets = self.execute(graph2_fn, [])
+ self.assertAlmostEqual(1.0, targets[0, :, :, 1].max())
+ self.assertAlmostEqual(1.0, targets[1, :, :, 2].max())
+ self.assertAlmostEqual(1.0, targets[2, :, :, 3].max())
+ self.assertAlmostEqual(0.0, targets[0, :, :, [0, 2, 3]].max())
+ self.assertAlmostEqual(0.0, targets[1, :, :, [0, 1, 3]].max())
+ self.assertAlmostEqual(0.0, targets[2, :, :, :3].max())
+
+ def test_low_overlap(self):
+ def graph1_fn():
+ box_batch = [tf.constant([self._box_center])]
+ classes = [
+ tf.one_hot([0], depth=2),
+ ]
+ assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(
+ 4, min_overlap=0.1)
+ targets_low_overlap = assigner.assign_center_targets_from_boxes(
+ 80, 80, box_batch, classes)
+ return targets_low_overlap
+ targets_low_overlap = self.execute(graph1_fn, [])
+ self.assertLess(1, np.count_nonzero(targets_low_overlap))
+
+ def graph2_fn():
+ box_batch = [tf.constant([self._box_center])]
+ classes = [
+ tf.one_hot([0], depth=2),
+ ]
+ assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(
+ 4, min_overlap=0.6)
+ targets_medium_overlap = assigner.assign_center_targets_from_boxes(
+ 80, 80, box_batch, classes)
+ return targets_medium_overlap
+ targets_medium_overlap = self.execute(graph2_fn, [])
+ self.assertLess(1, np.count_nonzero(targets_medium_overlap))
+
+ def graph3_fn():
+ box_batch = [tf.constant([self._box_center])]
+ classes = [
+ tf.one_hot([0], depth=2),
+ ]
+ assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(
+ 4, min_overlap=0.99)
+ targets_high_overlap = assigner.assign_center_targets_from_boxes(
+ 80, 80, box_batch, classes)
+ return targets_high_overlap
+
+ targets_high_overlap = self.execute(graph3_fn, [])
+ self.assertTrue(np.all(targets_low_overlap >= targets_medium_overlap))
+ self.assertTrue(np.all(targets_medium_overlap >= targets_high_overlap))
+
+ def test_empty_box_list(self):
+ """Test that an empty box list gives an all 0 heatmap."""
+ def graph_fn():
+ box_batch = [
+ tf.zeros((0, 4), dtype=tf.float32),
+ ]
+
+ classes = [
+ tf.zeros((0, 5), dtype=tf.float32),
+ ]
+
+ assigner = targetassigner.CenterNetCenterHeatmapTargetAssigner(
+ 4, min_overlap=0.1)
+ targets = assigner.assign_center_targets_from_boxes(
+ 80, 80, box_batch, classes)
+ return targets
+ targets = self.execute(graph_fn, [])
+ np.testing.assert_allclose(targets, 0.)
+
+
+class CenterNetBoxTargetAssignerTest(test_case.TestCase):
+
+ def setUp(self):
+ super(CenterNetBoxTargetAssignerTest, self).setUp()
+ self._box_center = [0.0, 0.0, 1.0, 1.0]
+ self._box_center_small = [0.25, 0.25, 0.75, 0.75]
+ self._box_lower_left = [0.5, 0.0, 1.0, 0.5]
+ self._box_center_offset = [0.1, 0.05, 1.0, 1.0]
+ self._box_odd_coordinates = [0.1625, 0.2125, 0.5625, 0.9625]
+
+ def test_max_distance_for_overlap(self):
+ """Test that the distance ensures the IoU with random boxes."""
+
+    # TODO(vighneshb) remove this after the `_smallest_positive_root`
+    # function is fixed.
+    self.skipTest(('Skipping test because we are using an incorrect version '
+                   'of the `max_distance_for_overlap` function to reproduce'
+                   ' results.'))
+
+ rng = np.random.RandomState(0)
+ n_samples = 100
+
+ width = rng.uniform(1, 100, size=n_samples)
+ height = rng.uniform(1, 100, size=n_samples)
+ min_iou = rng.uniform(0.1, 1.0, size=n_samples)
+
+ def graph_fn():
+ max_dist = targetassigner.max_distance_for_overlap(height, width, min_iou)
+ return max_dist
+ max_dist = self.execute(graph_fn, [])
+ xmin1 = np.zeros(n_samples)
+ ymin1 = np.zeros(n_samples)
+ xmax1 = np.zeros(n_samples) + width
+ ymax1 = np.zeros(n_samples) + height
+
+ xmin2 = max_dist * np.cos(rng.uniform(0, 2 * np.pi))
+ ymin2 = max_dist * np.sin(rng.uniform(0, 2 * np.pi))
+ xmax2 = width + max_dist * np.cos(rng.uniform(0, 2 * np.pi))
+ ymax2 = height + max_dist * np.sin(rng.uniform(0, 2 * np.pi))
+
+ boxes1 = np.vstack([ymin1, xmin1, ymax1, xmax1]).T
+ boxes2 = np.vstack([ymin2, xmin2, ymax2, xmax2]).T
+
+ iou = np.diag(np_box_ops.iou(boxes1, boxes2))
+
+ self.assertTrue(np.all(iou >= min_iou))
+
+ def test_max_distance_for_overlap_centernet(self):
+ """Test the version of the function used in the CenterNet paper."""
+
+ def graph_fn():
+ distance = targetassigner.max_distance_for_overlap(10, 5, 0.5)
+ return distance
+ distance = self.execute(graph_fn, [])
+ self.assertAlmostEqual(2.807764064, distance)
+
+ def test_assign_size_and_offset_targets(self):
+ """Test the assign_size_and_offset_targets function."""
+ def graph_fn():
+ box_batch = [
+ tf.constant([self._box_center, self._box_lower_left]),
+ tf.constant([self._box_center_offset]),
+ tf.constant([self._box_center_small, self._box_odd_coordinates]),
+ ]
+
+ assigner = targetassigner.CenterNetBoxTargetAssigner(4)
+ indices, hw, yx_offset, weights = assigner.assign_size_and_offset_targets(
+ 80, 80, box_batch)
+ return indices, hw, yx_offset, weights
+ indices, hw, yx_offset, weights = self.execute(graph_fn, [])
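+    # With stride 4 the 80x80 input maps to a 20x20 output grid: heights,
+    # widths and center indices below are expressed on that grid, and
+    # yx_offset holds the fractional remainder of each box center.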
+ self.assertEqual(indices.shape, (5, 3))
+ self.assertEqual(hw.shape, (5, 2))
+ self.assertEqual(yx_offset.shape, (5, 2))
+ self.assertEqual(weights.shape, (5,))
+ np.testing.assert_array_equal(
+ indices,
+ [[0, 10, 10], [0, 15, 5], [1, 11, 10], [2, 10, 10], [2, 7, 11]])
+ np.testing.assert_array_equal(
+ hw, [[20, 20], [10, 10], [18, 19], [10, 10], [8, 15]])
+ np.testing.assert_array_equal(
+ yx_offset, [[0, 0], [0, 0], [0, 0.5], [0, 0], [0.25, 0.75]])
+ np.testing.assert_array_equal(weights, 1)
+
+ def test_assign_size_and_offset_targets_weights(self):
+ """Test the assign_size_and_offset_targets function with box weights."""
+ def graph_fn():
+ box_batch = [
+ tf.constant([self._box_center, self._box_lower_left]),
+ tf.constant([self._box_lower_left, self._box_center_small]),
+ tf.constant([self._box_center_small, self._box_odd_coordinates]),
+ ]
+
+ cn_assigner = targetassigner.CenterNetBoxTargetAssigner(4)
+ weights_batch = [
+ tf.constant([0.0, 1.0]),
+ tf.constant([1.0, 1.0]),
+ tf.constant([0.0, 0.0])
+ ]
+ indices, hw, yx_offset, weights = cn_assigner.assign_size_and_offset_targets(
+ 80, 80, box_batch, weights_batch)
+ return indices, hw, yx_offset, weights
+ indices, hw, yx_offset, weights = self.execute(graph_fn, [])
+ self.assertEqual(indices.shape, (6, 3))
+ self.assertEqual(hw.shape, (6, 2))
+ self.assertEqual(yx_offset.shape, (6, 2))
+ self.assertEqual(weights.shape, (6,))
+ np.testing.assert_array_equal(indices,
+ [[0, 10, 10], [0, 15, 5], [1, 15, 5],
+ [1, 10, 10], [2, 10, 10], [2, 7, 11]])
+ np.testing.assert_array_equal(
+ hw, [[20, 20], [10, 10], [10, 10], [10, 10], [10, 10], [8, 15]])
+ np.testing.assert_array_equal(
+ yx_offset, [[0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0.25, 0.75]])
+ np.testing.assert_array_equal(weights, [0, 1, 1, 1, 0, 0])
+
+ def test_get_batch_predictions_from_indices(self):
+ """Test the get_batch_predictions_from_indices function.
+
+ This test verifies that the indices returned by
+ assign_size_and_offset_targets function work as expected with a predicted
+ tensor.
+
+ """
+ def graph_fn():
+ box_batch = [
+ tf.constant([self._box_center, self._box_lower_left]),
+ tf.constant([self._box_center_small, self._box_odd_coordinates]),
+ ]
+
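+      # The 160x80 input maps to a 40x20 output grid (stride 4); the entries
+      # below are planted at the stride-4 centers of the boxes above, so the
+      # gathered predictions come back in box order.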
+ pred_array = np.ones((2, 40, 20, 2), dtype=np.int32) * -1000
+ pred_array[0, 20, 10] = [1, 2]
+ pred_array[0, 30, 5] = [3, 4]
+ pred_array[1, 20, 10] = [5, 6]
+ pred_array[1, 14, 11] = [7, 8]
+
+ pred_tensor = tf.constant(pred_array)
+
+ cn_assigner = targetassigner.CenterNetBoxTargetAssigner(4)
+ indices, _, _, _ = cn_assigner.assign_size_and_offset_targets(
+ 160, 80, box_batch)
+
+ preds = targetassigner.get_batch_predictions_from_indices(
+ pred_tensor, indices)
+ return preds
+ preds = self.execute(graph_fn, [])
+ np.testing.assert_array_equal(preds, [[1, 2], [3, 4], [5, 6], [7, 8]])
+
+
+class CenterNetKeypointTargetAssignerTest(test_case.TestCase):
+
+ def test_keypoint_heatmap_targets(self):
+ def graph_fn():
+ gt_classes_list = [
+ tf.one_hot([0, 1, 0, 1], depth=4),
+ ]
+ coordinates = tf.expand_dims(
+ tf.constant(
+ np.array([[0.1, 0.2, 0.3, 0.4, 0.5],
+ [float('nan'), 0.7, float('nan'), 0.9, 1.0],
+ [0.4, 0.1, 0.4, 0.2, 0.1],
+ [float('nan'), 0.1, 0.5, 0.7, 0.6]]),
+ dtype=tf.float32),
+ axis=2)
+ gt_keypoints_list = [tf.concat([coordinates, coordinates], axis=2)]
+ gt_boxes_list = [
+ tf.constant(
+ np.array([[0.0, 0.0, 0.3, 0.3],
+ [0.0, 0.0, 0.5, 0.5],
+ [0.0, 0.0, 0.5, 0.5],
+ [0.0, 0.0, 1.0, 1.0]]),
+ dtype=tf.float32)
+ ]
+
+ cn_assigner = targetassigner.CenterNetKeypointTargetAssigner(
+ stride=4,
+ class_id=1,
+ keypoint_indices=[0, 2])
+ (targets, num_instances_batch,
+ valid_mask) = cn_assigner.assign_keypoint_heatmap_targets(
+ 120,
+ 80,
+ gt_keypoints_list,
+ gt_classes_list,
+ gt_boxes_list=gt_boxes_list)
+ return targets, num_instances_batch, valid_mask
+
+ targets, num_instances_batch, valid_mask = self.execute(graph_fn, [])
+ # keypoint (0.5, 0.5) is selected. The peak is expected to appear at the
+ # center of the image.
+ self.assertEqual((15, 10), _array_argmax(targets[0, :, :, 1]))
+ self.assertAlmostEqual(1.0, targets[0, 15, 10, 1])
+    # No peak for the first keypoint since its coordinate is NaN.
+ self.assertAlmostEqual(0.0, targets[0, 15, 10, 0])
+ # Verify the output heatmap shape.
+ self.assertAllEqual([1, 30, 20, 2], targets.shape)
+ # Verify the number of instances is correct.
+ np.testing.assert_array_almost_equal([[0, 1]],
+ num_instances_batch)
+    # When calling the function, we specify the class id to be 1 (the
+    # instances at indices 1 and 3) and the keypoint indices to be [0, 2],
+    # meaning that the instance at index 1 belongs to the target class but has
+    # no valid keypoints. As a result, the region of that instance's bounding
+    # box (0.0, 0.0, 0.5, 0.5) should be blacked out, which maps to
+    # (0, 0, 15, 10) in absolute output space.
+ self.assertAlmostEqual(np.sum(valid_mask[:, 0:16, 0:11]), 0.0)
+ # All other values are 1.0 so the sum is: 30 * 20 - 16 * 11 = 424.
+ self.assertAlmostEqual(np.sum(valid_mask), 424.0)
+
+ def test_assign_keypoints_offset_targets(self):
+ def graph_fn():
+ gt_classes_list = [
+ tf.one_hot([0, 1, 0, 1], depth=4),
+ ]
+ coordinates = tf.expand_dims(
+ tf.constant(
+ np.array([[0.1, 0.2, 0.3, 0.4, 0.5],
+ [float('nan'), 0.7, float('nan'), 0.9, 0.4],
+ [0.4, 0.1, 0.4, 0.2, 0.0],
+ [float('nan'), 0.0, 0.12, 0.7, 0.4]]),
+ dtype=tf.float32),
+ axis=2)
+ gt_keypoints_list = [tf.concat([coordinates, coordinates], axis=2)]
+
+ cn_assigner = targetassigner.CenterNetKeypointTargetAssigner(
+ stride=4,
+ class_id=1,
+ keypoint_indices=[0, 2])
+ (indices, offsets, weights) = cn_assigner.assign_keypoints_offset_targets(
+ height=120,
+ width=80,
+ gt_keypoints_list=gt_keypoints_list,
+ gt_classes_list=gt_classes_list)
+ return indices, weights, offsets
+ indices, weights, offsets = self.execute(graph_fn, [])
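+    # 4 instances x 2 selected keypoints = 8 candidate targets. Only instance
+    # 3's keypoint 2 (y = x = 0.12) belongs to class 1 and is not NaN; it maps
+    # to (0.12 * 120 / 4, 0.12 * 80 / 4) = (3.6, 2.4) on the output grid.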
+ # Only the last element has positive weight.
+ np.testing.assert_array_almost_equal(
+ [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0], weights)
+ # Validate the last element's indices and offsets.
+ np.testing.assert_array_equal([0, 3, 2], indices[7, :])
+ np.testing.assert_array_almost_equal([0.6, 0.4], offsets[7, :])
+
+ def test_assign_keypoints_offset_targets_radius(self):
+ def graph_fn():
+ gt_classes_list = [
+ tf.one_hot([0, 1, 0, 1], depth=4),
+ ]
+ coordinates = tf.expand_dims(
+ tf.constant(
+ np.array([[0.1, 0.2, 0.3, 0.4, 0.5],
+ [float('nan'), 0.7, float('nan'), 0.9, 0.4],
+ [0.4, 0.1, 0.4, 0.2, 0.0],
+ [float('nan'), 0.0, 0.12, 0.7, 0.4]]),
+ dtype=tf.float32),
+ axis=2)
+ gt_keypoints_list = [tf.concat([coordinates, coordinates], axis=2)]
+
+ cn_assigner = targetassigner.CenterNetKeypointTargetAssigner(
+ stride=4,
+ class_id=1,
+ keypoint_indices=[0, 2],
+ peak_radius=1,
+ per_keypoint_offset=True)
+ (indices, offsets, weights) = cn_assigner.assign_keypoints_offset_targets(
+ height=120,
+ width=80,
+ gt_keypoints_list=gt_keypoints_list,
+ gt_classes_list=gt_classes_list)
+ return indices, weights, offsets
+ indices, weights, offsets = self.execute(graph_fn, [])
+
+    # There are a total of 8 * 5 (neighbors) = 40 targets.
+ self.assertAllEqual(indices.shape, [40, 4])
+ self.assertAllEqual(offsets.shape, [40, 2])
+ self.assertAllEqual(weights.shape, [40])
+    # Only the last 5 elements (radius 1 generates 5 valid points) have a
+    # positive weight.
+ np.testing.assert_array_almost_equal([
+ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
+ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
+ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0
+ ], weights)
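+    # The 5 positive targets form a "plus" around the center pixel (3, 2);
+    # each offset is measured from its own neighbor, hence the +/-1 shifts
+    # relative to the (0.6, 0.4) offset at the center.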
+ # Validate the last element's (with neighbors) indices and offsets.
+ np.testing.assert_array_equal([0, 2, 2, 1], indices[35, :])
+ np.testing.assert_array_equal([0, 3, 1, 1], indices[36, :])
+ np.testing.assert_array_equal([0, 3, 2, 1], indices[37, :])
+ np.testing.assert_array_equal([0, 3, 3, 1], indices[38, :])
+ np.testing.assert_array_equal([0, 4, 2, 1], indices[39, :])
+ np.testing.assert_array_almost_equal([1.6, 0.4], offsets[35, :])
+ np.testing.assert_array_almost_equal([0.6, 1.4], offsets[36, :])
+ np.testing.assert_array_almost_equal([0.6, 0.4], offsets[37, :])
+ np.testing.assert_array_almost_equal([0.6, -0.6], offsets[38, :])
+ np.testing.assert_array_almost_equal([-0.4, 0.4], offsets[39, :])
+
+ def test_assign_joint_regression_targets(self):
+ def graph_fn():
+ gt_boxes_list = [
+ tf.constant(
+ np.array([[0.0, 0.0, 0.0, 0.0],
+ [0.0, 0.0, 0.0, 0.0],
+ [0.0, 0.0, 0.0, 0.0],
+ [0.0, 0.0, 1.0, 1.0]]),
+ dtype=tf.float32)
+ ]
+ gt_classes_list = [
+ tf.one_hot([0, 1, 0, 1], depth=4),
+ ]
+ coordinates = tf.expand_dims(
+ tf.constant(
+ np.array([[0.1, 0.2, 0.3, 0.4, 0.5],
+ [float('nan'), 0.7, float('nan'), 0.9, 0.4],
+ [0.4, 0.1, 0.4, 0.2, 0.0],
+ [float('nan'), 0.0, 0.12, 0.7, 0.4]]),
+ dtype=tf.float32),
+ axis=2)
+ gt_keypoints_list = [tf.concat([coordinates, coordinates], axis=2)]
+
+ cn_assigner = targetassigner.CenterNetKeypointTargetAssigner(
+ stride=4,
+ class_id=1,
+ keypoint_indices=[0, 2])
+ (indices, offsets, weights) = cn_assigner.assign_joint_regression_targets(
+ height=120,
+ width=80,
+ gt_keypoints_list=gt_keypoints_list,
+ gt_classes_list=gt_classes_list,
+ gt_boxes_list=gt_boxes_list)
+ return indices, offsets, weights
+ indices, offsets, weights = self.execute(graph_fn, [])
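+    # The joint offset is regressed from the object center: instance 3's box
+    # [0, 0, 1, 1] has its center at (15, 10) on the 30x20 output grid, and
+    # its keypoint 2 sits at (3.6, 2.4), giving an offset of (-11.4, -7.6).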
+ np.testing.assert_array_almost_equal(
+ [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0], weights)
+ np.testing.assert_array_equal([0, 15, 10, 1], indices[7, :])
+ np.testing.assert_array_almost_equal([-11.4, -7.6], offsets[7, :])
+
+ def test_assign_joint_regression_targets_radius(self):
+ def graph_fn():
+ gt_boxes_list = [
+ tf.constant(
+ np.array([[0.0, 0.0, 0.0, 0.0],
+ [0.0, 0.0, 0.0, 0.0],
+ [0.0, 0.0, 0.0, 0.0],
+ [0.0, 0.0, 1.0, 1.0]]),
+ dtype=tf.float32)
+ ]
+ gt_classes_list = [
+ tf.one_hot([0, 1, 0, 1], depth=4),
+ ]
+ coordinates = tf.expand_dims(
+ tf.constant(
+ np.array([[0.1, 0.2, 0.3, 0.4, 0.5],
+ [float('nan'), 0.7, float('nan'), 0.9, 0.4],
+ [0.4, 0.1, 0.4, 0.2, 0.0],
+ [float('nan'), 0.0, 0.12, 0.7, 0.4]]),
+ dtype=tf.float32),
+ axis=2)
+ gt_keypoints_list = [tf.concat([coordinates, coordinates], axis=2)]
+
+ cn_assigner = targetassigner.CenterNetKeypointTargetAssigner(
+ stride=4,
+ class_id=1,
+ keypoint_indices=[0, 2],
+ peak_radius=1)
+ (indices, offsets, weights) = cn_assigner.assign_joint_regression_targets(
+ height=120,
+ width=80,
+ gt_keypoints_list=gt_keypoints_list,
+ gt_classes_list=gt_classes_list,
+ gt_boxes_list=gt_boxes_list)
+ return indices, offsets, weights
+ indices, offsets, weights = self.execute(graph_fn, [])
+
+    # There are a total of 8 * 5 (neighbors) = 40 targets.
+ self.assertAllEqual(indices.shape, [40, 4])
+ self.assertAllEqual(offsets.shape, [40, 2])
+ self.assertAllEqual(weights.shape, [40])
+    # Only the last 5 elements (radius 1 generates 5 valid points) have a
+    # positive weight.
+ np.testing.assert_array_almost_equal([
+ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
+ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
+ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0
+ ], weights)
+ # Test the values of the indices and offsets of the last 5 elements.
+ np.testing.assert_array_equal([0, 14, 10, 1], indices[35, :])
+ np.testing.assert_array_equal([0, 15, 9, 1], indices[36, :])
+ np.testing.assert_array_equal([0, 15, 10, 1], indices[37, :])
+ np.testing.assert_array_equal([0, 15, 11, 1], indices[38, :])
+ np.testing.assert_array_equal([0, 16, 10, 1], indices[39, :])
+ np.testing.assert_array_almost_equal([-10.4, -7.6], offsets[35, :])
+ np.testing.assert_array_almost_equal([-11.4, -6.6], offsets[36, :])
+ np.testing.assert_array_almost_equal([-11.4, -7.6], offsets[37, :])
+ np.testing.assert_array_almost_equal([-11.4, -8.6], offsets[38, :])
+ np.testing.assert_array_almost_equal([-12.4, -7.6], offsets[39, :])
+
+
+class CenterNetMaskTargetAssignerTest(test_case.TestCase):
+
+ def test_assign_segmentation_targets(self):
+ def graph_fn():
+ gt_masks_list = [
+ # Example 0.
+ tf.constant([
+ [
+ [1., 0., 0., 0.],
+ [1., 1., 0., 0.],
+ [0., 0., 0., 0.],
+ [0., 0., 0., 0.],
+ ],
+ [
+ [0., 0., 0., 0.],
+ [0., 0., 0., 1.],
+ [0., 0., 0., 0.],
+ [0., 0., 0., 0.],
+ ],
+ [
+ [1., 1., 0., 0.],
+ [1., 1., 0., 0.],
+ [0., 0., 1., 1.],
+ [0., 0., 1., 1.],
+ ]
+ ], dtype=tf.float32),
+ # Example 1.
+ tf.constant([
+ [
+ [1., 1., 0., 1.],
+ [1., 1., 1., 1.],
+ [0., 0., 1., 1.],
+ [0., 0., 0., 1.],
+ ],
+ [
+ [0., 0., 0., 0.],
+ [0., 0., 0., 0.],
+ [1., 1., 0., 0.],
+ [1., 1., 0., 0.],
+ ],
+ ], dtype=tf.float32),
+ ]
+ gt_classes_list = [
+ # Example 0.
+ tf.constant([[1., 0., 0.],
+ [0., 1., 0.],
+ [1., 0., 0.]], dtype=tf.float32),
+ # Example 1.
+ tf.constant([[0., 1., 0.],
+ [0., 1., 0.]], dtype=tf.float32)
+ ]
+ cn_assigner = targetassigner.CenterNetMaskTargetAssigner(stride=2)
+ segmentation_target = cn_assigner.assign_segmentation_targets(
+ gt_masks_list=gt_masks_list,
+ gt_classes_list=gt_classes_list,
+ mask_resize_method=targetassigner.ResizeMethod.NEAREST_NEIGHBOR)
+ return segmentation_target
+ segmentation_target = self.execute(graph_fn, [])
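+    # With stride=2, the 4x4 masks are downsampled to 2x2 via nearest-neighbor
+    # resizing, and each output cell gets the one-hot class of the instance
+    # mask covering it.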
+
+ expected_seg_target = np.array([
+ # Example 0 [[class 0, class 1], [background, class 0]]
+ [[[1, 0, 0], [0, 1, 0]],
+ [[0, 0, 0], [1, 0, 0]]],
+ # Example 1 [[class 1, class 1], [class 1, class 1]]
+ [[[0, 1, 0], [0, 1, 0]],
+ [[0, 1, 0], [0, 1, 0]]],
+ ], dtype=np.float32)
+ np.testing.assert_array_almost_equal(
+ expected_seg_target, segmentation_target)
+
if __name__ == '__main__':
tf.enable_v2_behavior()
diff --git a/research/object_detection/data_decoders/tf_example_decoder.py b/research/object_detection/data_decoders/tf_example_decoder.py
index bd1fa2c771ec61a0ebc438392a966c08aff6faad..04cc4db59988161345c5cacd2e6f513b2707b0a1 100644
--- a/research/object_detection/data_decoders/tf_example_decoder.py
+++ b/research/object_detection/data_decoders/tf_example_decoder.py
@@ -30,6 +30,7 @@ from object_detection.core import data_decoder
from object_detection.core import standard_fields as fields
from object_detection.protos import input_reader_pb2
from object_detection.utils import label_map_util
+from object_detection.utils import shape_utils
# pylint: disable=g-import-not-at-top
try:
@@ -170,7 +171,8 @@ class TfExampleDecoder(data_decoder.DataDecoder):
num_additional_channels=0,
load_multiclass_scores=False,
load_context_features=False,
- expand_hierarchy_labels=False):
+ expand_hierarchy_labels=False,
+ load_dense_pose=False):
"""Constructor sets keys_to_features and items_to_handlers.
Args:
@@ -201,6 +203,7 @@ class TfExampleDecoder(data_decoder.DataDecoder):
account the provided hierarchy in the label_map_proto_file. For positive
classes, the labels are extended to ancestor. For negative classes,
the labels are expanded to descendants.
+ load_dense_pose: Whether to load DensePose annotations.
Raises:
ValueError: If `instance_mask_type` option is not one of
@@ -371,6 +374,34 @@ class TfExampleDecoder(data_decoder.DataDecoder):
self._decode_png_instance_masks))
else:
raise ValueError('Did not recognize the `instance_mask_type` option.')
+ if load_dense_pose:
+ self.keys_to_features['image/object/densepose/num'] = (
+ tf.VarLenFeature(tf.int64))
+ self.keys_to_features['image/object/densepose/part_index'] = (
+ tf.VarLenFeature(tf.int64))
+ self.keys_to_features['image/object/densepose/x'] = (
+ tf.VarLenFeature(tf.float32))
+ self.keys_to_features['image/object/densepose/y'] = (
+ tf.VarLenFeature(tf.float32))
+ self.keys_to_features['image/object/densepose/u'] = (
+ tf.VarLenFeature(tf.float32))
+ self.keys_to_features['image/object/densepose/v'] = (
+ tf.VarLenFeature(tf.float32))
+ self.items_to_handlers[
+ fields.InputDataFields.groundtruth_dp_num_points] = (
+ slim_example_decoder.Tensor('image/object/densepose/num'))
+ self.items_to_handlers[fields.InputDataFields.groundtruth_dp_part_ids] = (
+ slim_example_decoder.ItemHandlerCallback(
+ ['image/object/densepose/part_index',
+ 'image/object/densepose/num'], self._dense_pose_part_indices))
+ self.items_to_handlers[
+ fields.InputDataFields.groundtruth_dp_surface_coords] = (
+ slim_example_decoder.ItemHandlerCallback(
+ ['image/object/densepose/x', 'image/object/densepose/y',
+ 'image/object/densepose/u', 'image/object/densepose/v',
+ 'image/object/densepose/num'],
+ self._dense_pose_surface_coordinates))
+
if label_map_proto_file:
# If the label_map_proto is provided, try to use it in conjunction with
# the class text, and fall back to a materialized ID.
@@ -547,6 +578,14 @@ class TfExampleDecoder(data_decoder.DataDecoder):
group_of = fields.InputDataFields.groundtruth_group_of
tensor_dict[group_of] = tf.cast(tensor_dict[group_of], dtype=tf.bool)
+ if fields.InputDataFields.groundtruth_dp_num_points in tensor_dict:
+ tensor_dict[fields.InputDataFields.groundtruth_dp_num_points] = tf.cast(
+ tensor_dict[fields.InputDataFields.groundtruth_dp_num_points],
+ dtype=tf.int32)
+ tensor_dict[fields.InputDataFields.groundtruth_dp_part_ids] = tf.cast(
+ tensor_dict[fields.InputDataFields.groundtruth_dp_part_ids],
+ dtype=tf.int32)
+
return tensor_dict
def _reshape_keypoints(self, keys_to_tensors):
@@ -697,6 +736,97 @@ class TfExampleDecoder(data_decoder.DataDecoder):
lambda: tf.map_fn(decode_png_mask, png_masks, dtype=tf.float32),
lambda: tf.zeros(tf.cast(tf.stack([0, height, width]), dtype=tf.int32)))
+ def _dense_pose_part_indices(self, keys_to_tensors):
+ """Creates a tensor that contains part indices for each DensePose point.
+
+ Args:
+ keys_to_tensors: a dictionary from keys to tensors.
+
+ Returns:
+ A 2-D int32 tensor of shape [num_instances, num_points] where each element
+ contains the DensePose part index (0-23). The value `num_points`
+ corresponds to the maximum number of sampled points across all instances
+      in the image. Note that instances with fewer sampled points will be
+      padded with zeros in the last dimension.
+ """
+ num_points_per_instances = keys_to_tensors['image/object/densepose/num']
+ part_index = keys_to_tensors['image/object/densepose/part_index']
+ if isinstance(num_points_per_instances, tf.SparseTensor):
+ num_points_per_instances = tf.sparse_tensor_to_dense(
+ num_points_per_instances)
+ if isinstance(part_index, tf.SparseTensor):
+ part_index = tf.sparse_tensor_to_dense(part_index)
+ part_index = tf.cast(part_index, dtype=tf.int32)
+ max_points_per_instance = tf.cast(
+ tf.math.reduce_max(num_points_per_instances), dtype=tf.int32)
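+    # Cumulative point counts mark where each instance's points start and end
+    # within the flat per-image list of DensePose points.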
+ num_points_cumulative = tf.concat([
+ [0], tf.math.cumsum(num_points_per_instances)], axis=0)
+
+ def pad_parts_tensor(instance_ind):
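+      """Slices one instance's part indices and pads them to a fixed length."""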
+ points_range_start = num_points_cumulative[instance_ind]
+ points_range_end = num_points_cumulative[instance_ind + 1]
+ part_inds = part_index[points_range_start:points_range_end]
+ return shape_utils.pad_or_clip_nd(part_inds,
+ output_shape=[max_points_per_instance])
+
+ return tf.map_fn(pad_parts_tensor,
+ tf.range(tf.size(num_points_per_instances)),
+ dtype=tf.int32)
+
+ def _dense_pose_surface_coordinates(self, keys_to_tensors):
+ """Creates a tensor that contains surface coords for each DensePose point.
+
+ Args:
+ keys_to_tensors: a dictionary from keys to tensors.
+
+ Returns:
+ A 3-D float32 tensor of shape [num_instances, num_points, 4] where each
+ point contains (y, x, v, u) data for each sampled DensePose point. The
+ (y, x) coordinate has normalized image locations for the point, and (v, u)
+ contains the surface coordinate (also normalized) for the part. The value
+ `num_points` corresponds to the maximum number of sampled points across
+      all instances in the image. Note that instances with fewer sampled points
+      will be padded with zeros in dim=1.
+ """
+ num_points_per_instances = keys_to_tensors['image/object/densepose/num']
+ dp_y = keys_to_tensors['image/object/densepose/y']
+ dp_x = keys_to_tensors['image/object/densepose/x']
+ dp_v = keys_to_tensors['image/object/densepose/v']
+ dp_u = keys_to_tensors['image/object/densepose/u']
+ if isinstance(num_points_per_instances, tf.SparseTensor):
+ num_points_per_instances = tf.sparse_tensor_to_dense(
+ num_points_per_instances)
+ if isinstance(dp_y, tf.SparseTensor):
+ dp_y = tf.sparse_tensor_to_dense(dp_y)
+ if isinstance(dp_x, tf.SparseTensor):
+ dp_x = tf.sparse_tensor_to_dense(dp_x)
+ if isinstance(dp_v, tf.SparseTensor):
+ dp_v = tf.sparse_tensor_to_dense(dp_v)
+ if isinstance(dp_u, tf.SparseTensor):
+ dp_u = tf.sparse_tensor_to_dense(dp_u)
+ max_points_per_instance = tf.cast(
+ tf.math.reduce_max(num_points_per_instances), dtype=tf.int32)
+ num_points_cumulative = tf.concat([
+ [0], tf.math.cumsum(num_points_per_instances)], axis=0)
+
+ def pad_surface_coordinates_tensor(instance_ind):
+ """Pads DensePose surface coordinates for each instance."""
+ points_range_start = num_points_cumulative[instance_ind]
+ points_range_end = num_points_cumulative[instance_ind + 1]
+ y = dp_y[points_range_start:points_range_end]
+ x = dp_x[points_range_start:points_range_end]
+ v = dp_v[points_range_start:points_range_end]
+ u = dp_u[points_range_start:points_range_end]
+ # Create [num_points_i, 4] tensor, where num_points_i is the number of
+ # sampled points for instance i.
+ unpadded_tensor = tf.stack([y, x, v, u], axis=1)
+ return shape_utils.pad_or_clip_nd(
+ unpadded_tensor, output_shape=[max_points_per_instance, 4])
+
+ return tf.map_fn(pad_surface_coordinates_tensor,
+ tf.range(tf.size(num_points_per_instances)),
+ dtype=tf.float32)
+
def _expand_image_label_hierarchy(self, image_classes, image_confidences):
"""Expand image level labels according to the hierarchy.
diff --git a/research/object_detection/data_decoders/tf_example_decoder_test.py b/research/object_detection/data_decoders/tf_example_decoder_test.py
index 9cbed32fc05f3d6b2c9e3233633627412482e0f5..81ed9258e650d7534bd9e3ae76aa574bc2a06b61 100644
--- a/research/object_detection/data_decoders/tf_example_decoder_test.py
+++ b/research/object_detection/data_decoders/tf_example_decoder_test.py
@@ -1096,8 +1096,8 @@ class TfExampleDecoderTest(test_case.TestCase):
return example_decoder.decode(tf.convert_to_tensor(example))
tensor_dict = self.execute_cpu(graph_fn, [])
- self.assertTrue(
- fields.InputDataFields.groundtruth_instance_masks not in tensor_dict)
+ self.assertNotIn(fields.InputDataFields.groundtruth_instance_masks,
+ tensor_dict)
def testDecodeImageLabels(self):
image_tensor = np.random.randint(256, size=(4, 5, 3)).astype(np.uint8)
@@ -1116,8 +1116,7 @@ class TfExampleDecoderTest(test_case.TestCase):
return example_decoder.decode(tf.convert_to_tensor(example))
tensor_dict = self.execute_cpu(graph_fn_1, [])
- self.assertTrue(
- fields.InputDataFields.groundtruth_image_classes in tensor_dict)
+ self.assertIn(fields.InputDataFields.groundtruth_image_classes, tensor_dict)
self.assertAllEqual(
tensor_dict[fields.InputDataFields.groundtruth_image_classes],
np.array([1, 2]))
@@ -1152,8 +1151,7 @@ class TfExampleDecoderTest(test_case.TestCase):
return example_decoder.decode(tf.convert_to_tensor(example))
tensor_dict = self.execute_cpu(graph_fn_2, [])
- self.assertTrue(
- fields.InputDataFields.groundtruth_image_classes in tensor_dict)
+ self.assertIn(fields.InputDataFields.groundtruth_image_classes, tensor_dict)
self.assertAllEqual(
tensor_dict[fields.InputDataFields.groundtruth_image_classes],
np.array([1, 3]))
@@ -1345,6 +1343,93 @@ class TfExampleDecoderTest(test_case.TestCase):
expected_image_confidence,
tensor_dict[fields.InputDataFields.groundtruth_image_confidences])
+ def testDecodeDensePose(self):
+ image_tensor = np.random.randint(256, size=(4, 5, 3)).astype(np.uint8)
+ encoded_jpeg, _ = self._create_encoded_and_decoded_data(
+ image_tensor, 'jpeg')
+ bbox_ymins = [0.0, 4.0, 2.0]
+ bbox_xmins = [1.0, 5.0, 8.0]
+ bbox_ymaxs = [2.0, 6.0, 1.0]
+ bbox_xmaxs = [3.0, 7.0, 3.3]
+ densepose_num = [0, 4, 2]
+ densepose_part_index = [2, 2, 3, 4, 2, 9]
+ densepose_x = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]
+ densepose_y = [0.9, 0.8, 0.7, 0.6, 0.5, 0.4]
+ densepose_u = [0.01, 0.02, 0.03, 0.04, 0.05, 0.06]
+ densepose_v = [0.99, 0.98, 0.97, 0.96, 0.95, 0.94]
+
+ def graph_fn():
+ example = tf.train.Example(
+ features=tf.train.Features(
+ feature={
+ 'image/encoded':
+ dataset_util.bytes_feature(encoded_jpeg),
+ 'image/format':
+ dataset_util.bytes_feature(six.b('jpeg')),
+ 'image/object/bbox/ymin':
+ dataset_util.float_list_feature(bbox_ymins),
+ 'image/object/bbox/xmin':
+ dataset_util.float_list_feature(bbox_xmins),
+ 'image/object/bbox/ymax':
+ dataset_util.float_list_feature(bbox_ymaxs),
+ 'image/object/bbox/xmax':
+ dataset_util.float_list_feature(bbox_xmaxs),
+ 'image/object/densepose/num':
+ dataset_util.int64_list_feature(densepose_num),
+ 'image/object/densepose/part_index':
+ dataset_util.int64_list_feature(densepose_part_index),
+ 'image/object/densepose/x':
+ dataset_util.float_list_feature(densepose_x),
+ 'image/object/densepose/y':
+ dataset_util.float_list_feature(densepose_y),
+ 'image/object/densepose/u':
+ dataset_util.float_list_feature(densepose_u),
+ 'image/object/densepose/v':
+ dataset_util.float_list_feature(densepose_v),
+
+ })).SerializeToString()
+
+ example_decoder = tf_example_decoder.TfExampleDecoder(
+ load_dense_pose=True)
+ output = example_decoder.decode(tf.convert_to_tensor(example))
+ dp_num_points = output[fields.InputDataFields.groundtruth_dp_num_points]
+ dp_part_ids = output[fields.InputDataFields.groundtruth_dp_part_ids]
+ dp_surface_coords = output[
+ fields.InputDataFields.groundtruth_dp_surface_coords]
+ return dp_num_points, dp_part_ids, dp_surface_coords
+
+ dp_num_points, dp_part_ids, dp_surface_coords = self.execute_cpu(
+ graph_fn, [])
+
+ expected_dp_num_points = [0, 4, 2]
+ expected_dp_part_ids = [
+ [0, 0, 0, 0],
+ [2, 2, 3, 4],
+ [2, 9, 0, 0]
+ ]
+ expected_dp_surface_coords = np.array(
+ [
+ # Instance 0 (no points).
+ [[0., 0., 0., 0.],
+ [0., 0., 0., 0.],
+ [0., 0., 0., 0.],
+ [0., 0., 0., 0.]],
+ # Instance 1 (4 points).
+ [[0.9, 0.1, 0.99, 0.01],
+ [0.8, 0.2, 0.98, 0.02],
+ [0.7, 0.3, 0.97, 0.03],
+ [0.6, 0.4, 0.96, 0.04]],
+ # Instance 2 (2 points).
+ [[0.5, 0.5, 0.95, 0.05],
+ [0.4, 0.6, 0.94, 0.06],
+ [0., 0., 0., 0.],
+ [0., 0., 0., 0.]],
+ ], dtype=np.float32)
+
+ self.assertAllEqual(dp_num_points, expected_dp_num_points)
+ self.assertAllEqual(dp_part_ids, expected_dp_part_ids)
+ self.assertAllClose(dp_surface_coords, expected_dp_surface_coords)
+
if __name__ == '__main__':
tf.test.main()
diff --git a/official/r1/__init__.py b/research/object_detection/dataset_tools/context_rcnn/__init__.py
similarity index 100%
rename from official/r1/__init__.py
rename to research/object_detection/dataset_tools/context_rcnn/__init__.py
diff --git a/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples.py b/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples.py
new file mode 100644
index 0000000000000000000000000000000000000000..9c05387980e193f9cb40a767944357d80379384c
--- /dev/null
+++ b/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples.py
@@ -0,0 +1,845 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+r"""A Beam job to add contextual memory banks to tf.Examples.
+
+This tool groups images containing bounding boxes and embedded context features
+by a key, either `image/location` or `image/seq_id`, and time horizon,
+then uses these groups to build up a contextual memory bank from the embedded
+context features from each image in the group and adds that context to the
+output tf.Examples for each image in the group.
+
+Steps to generate a dataset with context from one with bounding boxes and
+embedded context features:
+1. Use object_detection/export_inference_graph.py to get a `saved_model` for
+ inference. The input node must accept a tf.Example proto.
+2. Run this tool with `saved_model` from step 1 and a TFRecord of tf.Example
+ protos containing images, bounding boxes, and embedded context features.
+ The context features can be added to tf.Examples using
+ generate_embedding_data.py.
+
+Example Usage:
+--------------
+python add_context_to_examples.py \
+ --input_tfrecord path/to/input_tfrecords* \
+ --output_tfrecord path/to/output_tfrecords \
+ --sequence_key image/location \
+ --time_horizon month
+
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import copy
+import datetime
+import io
+import itertools
+import json
+import os
+
+from absl import app
+from absl import flags
+import apache_beam as beam
+import numpy as np
+import PIL.Image
+import six
+import tensorflow as tf
+
+from apache_beam import runners
+
+
+flags.DEFINE_string('input_tfrecord', None, 'TFRecord containing images in '
+                    'tf.Example format for object detection, with bounding '
+                    'boxes and contextual feature embeddings.')
+flags.DEFINE_string('output_tfrecord', None,
+ 'TFRecord containing images in tf.Example format, with '
+ 'added contextual memory banks.')
+flags.DEFINE_string('sequence_key', None, 'Key to use when grouping sequences: '
+ 'so far supports `image/seq_id` and `image/location`.')
+flags.DEFINE_string('time_horizon', None, 'What time horizon to use when '
+ 'splitting the data, if any. Options are: `year`, `month`,'
+                    ' `week`, `day`, `hour`, `minute`, `None`.')
+flags.DEFINE_integer('subsample_context_features_rate', 0, 'Whether to '
+ 'subsample the context_features, and if so how many to '
+ 'sample. If the rate is set to X, it will sample context '
+ 'from 1 out of every X images. Default is sampling from '
+ 'every image, which is X=0.')
+flags.DEFINE_boolean('reduce_image_size', True, 'Downsamples images to have '
+                     'longest side max_image_dimension, maintaining aspect '
+                     'ratio.')
+flags.DEFINE_integer('max_image_dimension', 1024, 'sets max image dimension')
+flags.DEFINE_boolean('add_context_features', True, 'Adds a memory bank of '
+                     'embeddings to each clip.')
+flags.DEFINE_boolean('sorted_image_ids', True, 'whether the image source_ids '
+ 'are sortable to deal with date_captured tie-breaks')
+flags.DEFINE_string('image_ids_to_keep', 'All', 'Path to a .json list of '
+                    'image ids to keep, used for ground truth eval creation.')
+flags.DEFINE_boolean('keep_context_features_image_id_list', False, 'Whether or '
+ 'not to keep a list of the image_ids corresponding to the '
+ 'memory bank')
+flags.DEFINE_boolean('keep_only_positives', False, 'Whether or not to '
+ 'keep only positive boxes based on score')
+flags.DEFINE_boolean('keep_only_positives_gt', False, 'Whether or not to '
+ 'keep only positive boxes based on gt class')
+flags.DEFINE_float('context_features_score_threshold', 0.7, 'What score '
+ 'threshold to use for boxes in context_features')
+flags.DEFINE_integer('max_num_elements_in_context_features', 2000, 'Sets max '
+ 'num elements per memory bank')
+flags.DEFINE_integer('num_shards', 0, 'Number of output shards.')
+flags.DEFINE_string('output_type', 'tf_sequence_example', 'Output type, one of '
+ '`tf_example`, `tf_sequence_example`')
+flags.DEFINE_integer('max_clip_length', None, 'Max length for sequence '
+ 'example outputs.')
+
+FLAGS = flags.FLAGS
+
+DEFAULT_FEATURE_LENGTH = 2057
+
+
+class ReKeyDataFn(beam.DoFn):
+ """Re-keys tfrecords by sequence_key.
+
+ This Beam DoFn re-keys the tfrecords by a user-defined sequence_key
+ """
+
+ def __init__(self, sequence_key, time_horizon,
+ reduce_image_size, max_image_dimension):
+ """Initialization function.
+
+ Args:
+ sequence_key: A feature name to use as a key for grouping sequences.
+ Must point to a key of type bytes_list
+ time_horizon: What length of time to use to partition the data when
+        building the memory banks. Options: `year`, `month`, `week`, `day`,
+ `hour`, `minute`, None
+ reduce_image_size: Whether to reduce the sizes of the stored images.
+ max_image_dimension: maximum dimension of reduced images
+ """
+ self._sequence_key = sequence_key
+ if time_horizon is None or time_horizon in {'year', 'month', 'week', 'day',
+ 'hour', 'minute'}:
+ self._time_horizon = time_horizon
+ else:
+ raise ValueError('Time horizon not supported.')
+ self._reduce_image_size = reduce_image_size
+ self._max_image_dimension = max_image_dimension
+ self._session = None
+ self._num_examples_processed = beam.metrics.Metrics.counter(
+ 'data_rekey', 'num_tf_examples_processed')
+ self._num_images_resized = beam.metrics.Metrics.counter(
+ 'data_rekey', 'num_images_resized')
+ self._num_images_read = beam.metrics.Metrics.counter(
+ 'data_rekey', 'num_images_read')
+ self._num_images_found = beam.metrics.Metrics.counter(
+        'data_rekey', 'num_images_found')
+ self._num_got_shape = beam.metrics.Metrics.counter(
+ 'data_rekey', 'num_images_got_shape')
+ self._num_images_found_size = beam.metrics.Metrics.counter(
+ 'data_rekey', 'num_images_found_size')
+ self._num_examples_cleared = beam.metrics.Metrics.counter(
+ 'data_rekey', 'num_examples_cleared')
+ self._num_examples_updated = beam.metrics.Metrics.counter(
+ 'data_rekey', 'num_examples_updated')
+
+ def process(self, tfrecord_entry):
+ return self._rekey_examples(tfrecord_entry)
+
+ def _largest_size_at_most(self, height, width, largest_side):
+ """Computes new shape with the largest side equal to `largest_side`.
+
+ Args:
+ height: an int indicating the current height.
+ width: an int indicating the current width.
+ largest_side: A python integer indicating the size of
+ the largest side after resize.
+ Returns:
+ new_height: an int indicating the new height.
+ new_width: an int indicating the new width.
+ """
+
+ x_scale = float(largest_side) / float(width)
+ y_scale = float(largest_side) / float(height)
+ scale = min(x_scale, y_scale)
+
+ new_width = int(width * scale)
+ new_height = int(height * scale)
+
+ return new_height, new_width
+
+ def _resize_image(self, input_example):
+ """Resizes the image within input_example and updates the height and width.
+
+ Args:
+ input_example: A tf.Example that we want to update to contain a resized
+ image.
+ Returns:
+ input_example: Updated tf.Example.
+ """
+
+ original_image = copy.deepcopy(
+ input_example.features.feature['image/encoded'].bytes_list.value[0])
+ self._num_images_read.inc(1)
+
+ height = copy.deepcopy(
+ input_example.features.feature['image/height'].int64_list.value[0])
+
+ width = copy.deepcopy(
+ input_example.features.feature['image/width'].int64_list.value[0])
+
+ self._num_got_shape.inc(1)
+
+ new_height, new_width = self._largest_size_at_most(
+ height, width, self._max_image_dimension)
+
+ self._num_images_found_size.inc(1)
+
+ encoded_jpg_io = io.BytesIO(original_image)
+ image = PIL.Image.open(encoded_jpg_io)
+ resized_image = image.resize((new_width, new_height))
+
+ with io.BytesIO() as output:
+ resized_image.save(output, format='JPEG')
+ encoded_resized_image = output.getvalue()
+
+ self._num_images_resized.inc(1)
+
+ del input_example.features.feature['image/encoded'].bytes_list.value[:]
+ del input_example.features.feature['image/height'].int64_list.value[:]
+ del input_example.features.feature['image/width'].int64_list.value[:]
+
+ self._num_examples_cleared.inc(1)
+
+ input_example.features.feature['image/encoded'].bytes_list.value.extend(
+ [encoded_resized_image])
+ input_example.features.feature['image/height'].int64_list.value.extend(
+ [new_height])
+ input_example.features.feature['image/width'].int64_list.value.extend(
+ [new_width])
+ self._num_examples_updated.inc(1)
+
+ return input_example
+
+ def _rekey_examples(self, tfrecord_entry):
+ serialized_example = copy.deepcopy(tfrecord_entry)
+
+ input_example = tf.train.Example.FromString(serialized_example)
+
+ self._num_images_found.inc(1)
+
+ if self._reduce_image_size:
+ input_example = self._resize_image(input_example)
+ self._num_images_resized.inc(1)
+
+ new_key = input_example.features.feature[
+ self._sequence_key].bytes_list.value[0]
+
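+    # When a time horizon is set, the horizon components are appended to the
+    # grouping key, e.g. a `month` horizon turns b'location_1' into
+    # b'location_1/2020/6'.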
+ if self._time_horizon:
+ date_captured = datetime.datetime.strptime(
+ six.ensure_str(input_example.features.feature[
+ 'image/date_captured'].bytes_list.value[0]), '%Y-%m-%d %H:%M:%S')
+ year = date_captured.year
+ month = date_captured.month
+ day = date_captured.day
+ week = np.floor(float(day) / float(7))
+ hour = date_captured.hour
+ minute = date_captured.minute
+
+ if self._time_horizon == 'year':
+ new_key = new_key + six.ensure_binary('/' + str(year))
+ elif self._time_horizon == 'month':
+ new_key = new_key + six.ensure_binary(
+ '/' + str(year) + '/' + str(month))
+ elif self._time_horizon == 'week':
+ new_key = new_key + six.ensure_binary(
+ '/' + str(year) + '/' + str(month) + '/' + str(week))
+ elif self._time_horizon == 'day':
+ new_key = new_key + six.ensure_binary(
+ '/' + str(year) + '/' + str(month) + '/' + str(day))
+ elif self._time_horizon == 'hour':
+ new_key = new_key + six.ensure_binary(
+ '/' + str(year) + '/' + str(month) + '/' + str(day) + '/' + (
+ str(hour)))
+ elif self._time_horizon == 'minute':
+ new_key = new_key + six.ensure_binary(
+ '/' + str(year) + '/' + str(month) + '/' + str(day) + '/' + (
+ str(hour) + '/' + str(minute)))
+
+ self._num_examples_processed.inc(1)
+
+ return [(new_key, input_example)]
+
+
+class SortGroupedDataFn(beam.DoFn):
+ """Sorts data within a keyed group.
+
+ This Beam DoFn sorts the grouped list of image examples by frame_num
+ """
+
+ def __init__(self, sequence_key, sorted_image_ids,
+ max_num_elements_in_context_features):
+ """Initialization function.
+
+ Args:
+ sequence_key: A feature name to use as a key for grouping sequences.
+ Must point to a key of type bytes_list
+ sorted_image_ids: Whether the image ids are sortable to use as sorting
+ tie-breakers
+ max_num_elements_in_context_features: The maximum number of elements
+ allowed in the memory bank
+ """
+ self._session = None
+ self._num_examples_processed = beam.metrics.Metrics.counter(
+ 'sort_group', 'num_groups_sorted')
+ self._too_many_elements = beam.metrics.Metrics.counter(
+ 'sort_group', 'too_many_elements')
+ self._split_elements = beam.metrics.Metrics.counter(
+ 'sort_group', 'split_elements')
+ self._sequence_key = six.ensure_binary(sequence_key)
+ self._sorted_image_ids = sorted_image_ids
+ self._max_num_elements_in_context_features = (
+ max_num_elements_in_context_features)
+
+ def process(self, grouped_entry):
+ return self._sort_image_examples(grouped_entry)
+
+ def _sort_image_examples(self, grouped_entry):
+ key, example_collection = grouped_entry
+ example_list = list(example_collection)
+
+ def get_frame_num(example):
+ return example.features.feature['image/seq_frame_num'].int64_list.value[0]
+
+ def get_date_captured(example):
+ return datetime.datetime.strptime(
+ six.ensure_str(
+ example.features.feature[
+ 'image/date_captured'].bytes_list.value[0]),
+ '%Y-%m-%d %H:%M:%S')
+
+ def get_image_id(example):
+ return example.features.feature['image/source_id'].bytes_list.value[0]
+
+ if self._sequence_key == six.ensure_binary('image/seq_id'):
+ sorting_fn = get_frame_num
+ elif self._sequence_key == six.ensure_binary('image/location'):
+ if self._sorted_image_ids:
+ sorting_fn = get_image_id
+ else:
+ sorting_fn = get_date_captured
+
+ sorted_example_list = sorted(example_list, key=sorting_fn)
+
+ self._num_examples_processed.inc(1)
+
+ if len(sorted_example_list) > self._max_num_elements_in_context_features:
+ leftovers = sorted_example_list
+ output_list = []
+ count = 0
+ self._too_many_elements.inc(1)
+ while len(leftovers) > self._max_num_elements_in_context_features:
+ self._split_elements.inc(1)
+ new_key = key + six.ensure_binary('_' + str(count))
+ new_list = leftovers[:self._max_num_elements_in_context_features]
+ output_list.append((new_key, new_list))
+        # Drop the examples that were just emitted and continue with the rest.
+        leftovers = leftovers[self._max_num_elements_in_context_features:]
+        count += 1
+      if leftovers:
+        new_key = key + six.ensure_binary('_' + str(count))
+        output_list.append((new_key, leftovers))
+ else:
+ output_list = [(key, sorted_example_list)]
+
+ return output_list
+
+
+def get_sliding_window(example_list, max_clip_length, stride_length):
+ """Yields a sliding window over data from example_list.
+
+  Sliding window has width max_clip_length (n) and stride stride_length (m).
+  s -> (s[0], s[1], ..., s[n-1]), (s[m], s[m+1], ..., s[m+n-1]), ...
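+
+  For example, with max_clip_length=3 and stride_length=3, the list
+  [e0, e1, e2, e3, e4] yields (e0, e1, e2) followed by (e3, e4).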
+
+ Args:
+ example_list: A list of examples.
+ max_clip_length: The maximum length of each clip.
+ stride_length: The stride between each clip.
+
+ Yields:
+    Lists of examples, each with length <= max_clip_length.
+ """
+
+ # check if the list is too short to slide over
+ if len(example_list) < max_clip_length:
+ yield example_list
+ else:
+ starting_values = [i*stride_length for i in
+ range(len(example_list)) if
+ len(example_list) > i*stride_length]
+ for start in starting_values:
+ result = tuple(itertools.islice(example_list, start,
+ min(start + max_clip_length,
+ len(example_list))))
+ yield result
+
+
+class GenerateContextFn(beam.DoFn):
+ """Generates context data for camera trap images.
+
+ This Beam DoFn builds up contextual memory banks from groups of images and
+ stores them in the output tf.Example or tf.Sequence_example for each image.
+ """
+
+ def __init__(self, sequence_key, add_context_features, image_ids_to_keep,
+ keep_context_features_image_id_list=False,
+ subsample_context_features_rate=0,
+ keep_only_positives=False,
+ context_features_score_threshold=0.7,
+ keep_only_positives_gt=False,
+ max_num_elements_in_context_features=5000,
+ pad_context_features=False,
+ output_type='tf_example', max_clip_length=None):
+ """Initialization function.
+
+ Args:
+ sequence_key: A feature name to use as a key for grouping sequences.
+ add_context_features: Whether to keep and store the contextual memory
+ bank.
+ image_ids_to_keep: A list of image ids to save, to use to build data
+ subsets for evaluation.
+ keep_context_features_image_id_list: Whether to save an ordered list of
+ the ids of the images in the contextual memory bank.
+ subsample_context_features_rate: What rate to subsample images for the
+ contextual memory bank.
+ keep_only_positives: Whether to only keep high scoring
+ (>context_features_score_threshold) features in the contextual memory
+ bank.
+ context_features_score_threshold: What threshold to use for keeping
+ features.
+ keep_only_positives_gt: Whether to only keep features from images that
+ contain objects based on the ground truth (for training).
+ max_num_elements_in_context_features: the maximum number of elements in
+ the memory bank
+ pad_context_features: Whether to pad the context features to a fixed size.
+      output_type: What type of output, tf_example or tf_sequence_example.
+      max_clip_length: The maximum length of a sequence example, before
+        splitting into multiple examples.
+ """
+ self._session = None
+ self._num_examples_processed = beam.metrics.Metrics.counter(
+ 'sequence_data_generation', 'num_seq_examples_processed')
+ self._num_keys_processed = beam.metrics.Metrics.counter(
+ 'sequence_data_generation', 'num_keys_processed')
+ self._sequence_key = sequence_key
+ self._add_context_features = add_context_features
+ self._pad_context_features = pad_context_features
+ self._output_type = output_type
+ self._max_clip_length = max_clip_length
+ if six.ensure_str(image_ids_to_keep) == 'All':
+ self._image_ids_to_keep = None
+ else:
+ with tf.io.gfile.GFile(image_ids_to_keep) as f:
+ self._image_ids_to_keep = json.load(f)
+ self._keep_context_features_image_id_list = (
+ keep_context_features_image_id_list)
+ self._subsample_context_features_rate = subsample_context_features_rate
+ self._keep_only_positives = keep_only_positives
+ self._keep_only_positives_gt = keep_only_positives_gt
+ self._context_features_score_threshold = context_features_score_threshold
+ self._max_num_elements_in_context_features = (
+ max_num_elements_in_context_features)
+
+ self._images_kept = beam.metrics.Metrics.counter(
+ 'sequence_data_generation', 'images_kept')
+ self._images_loaded = beam.metrics.Metrics.counter(
+ 'sequence_data_generation', 'images_loaded')
+
+ def process(self, grouped_entry):
+ return self._add_context_to_example(copy.deepcopy(grouped_entry))
+
+ def _build_context_features(self, example_list):
+ context_features = []
+ context_features_image_id_list = []
+ count = 0
+ example_embedding = []
+
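+    # Images that are subsampled away or filtered out below are tagged with an
+    # out-of-range context_features_idx (max size + 1) rather than a valid
+    # index into the memory bank.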
+ for idx, example in enumerate(example_list):
+ if self._subsample_context_features_rate > 0:
+ if (idx % self._subsample_context_features_rate) != 0:
+ example.features.feature[
+ 'context_features_idx'].int64_list.value.append(
+ self._max_num_elements_in_context_features + 1)
+ continue
+ if self._keep_only_positives:
+ if example.features.feature[
+ 'image/embedding_score'
+ ].float_list.value[0] < self._context_features_score_threshold:
+ example.features.feature[
+ 'context_features_idx'].int64_list.value.append(
+ self._max_num_elements_in_context_features + 1)
+ continue
+ if self._keep_only_positives_gt:
+ if len(example.features.feature[
+ 'image/object/bbox/xmin'
+ ].float_list.value) < 1:
+ example.features.feature[
+ 'context_features_idx'].int64_list.value.append(
+ self._max_num_elements_in_context_features + 1)
+ continue
+
+ example_embedding = list(example.features.feature[
+ 'image/embedding'].float_list.value)
+ context_features.extend(example_embedding)
+ example.features.feature[
+ 'context_features_idx'].int64_list.value.append(count)
+ count += 1
+ example_image_id = example.features.feature[
+ 'image/source_id'].bytes_list.value[0]
+ context_features_image_id_list.append(example_image_id)
+
+ if not example_embedding:
+ example_embedding.append(np.zeros(DEFAULT_FEATURE_LENGTH))
+
+ feature_length = DEFAULT_FEATURE_LENGTH
+
+ # If the example_list is not empty and image/embedding_length is in the
+    # feature dict, feature_length will be assigned to that. Otherwise, it will
+ # be kept as default.
+ if example_list and (
+ 'image/embedding_length' in example_list[0].features.feature):
+ feature_length = example_list[0].features.feature[
+ 'image/embedding_length'].int64_list.value[0]
+
+ if self._pad_context_features:
+ while len(context_features_image_id_list) < (
+ self._max_num_elements_in_context_features):
+ context_features_image_id_list.append('')
+
+ return context_features, feature_length, context_features_image_id_list
+
+ def _add_context_to_example(self, grouped_entry):
+ key, example_collection = grouped_entry
+ list_of_examples = []
+
+ example_list = list(example_collection)
+
+ if self._add_context_features:
+ context_features, feature_length, context_features_image_id_list = (
+ self._build_context_features(example_list))
+
+ if self._image_ids_to_keep is not None:
+ new_example_list = []
+ for example in example_list:
+ im_id = example.features.feature['image/source_id'].bytes_list.value[0]
+ self._images_loaded.inc(1)
+ if six.ensure_str(im_id) in self._image_ids_to_keep:
+ self._images_kept.inc(1)
+ new_example_list.append(example)
+ if new_example_list:
+ example_list = new_example_list
+ else:
+ return []
+
+ if self._output_type == 'tf_sequence_example':
+ if self._max_clip_length is not None:
+ # For now, no overlap
+ clips = get_sliding_window(
+ example_list, self._max_clip_length, self._max_clip_length)
+ else:
+ clips = [example_list]
+
+ for clip_num, clip_list in enumerate(clips):
+ # initialize sequence example
+ seq_example = tf.train.SequenceExample()
+        video_id = six.ensure_str(key) + '_' + str(clip_num)
+ seq_example.context.feature['clip/media_id'].bytes_list.value.append(
+ video_id.encode('utf8'))
+ seq_example.context.feature['clip/frames'].int64_list.value.append(
+ len(clip_list))
+
+ seq_example.context.feature[
+ 'clip/start/timestamp'].int64_list.value.append(0)
+ seq_example.context.feature[
+ 'clip/end/timestamp'].int64_list.value.append(len(clip_list))
+ seq_example.context.feature['image/format'].bytes_list.value.append(
+ six.ensure_binary('JPG'))
+ seq_example.context.feature['image/channels'].int64_list.value.append(3)
+ context_example = clip_list[0]
+ seq_example.context.feature['image/height'].int64_list.value.append(
+ context_example.features.feature[
+ 'image/height'].int64_list.value[0])
+ seq_example.context.feature['image/width'].int64_list.value.append(
+ context_example.features.feature['image/width'].int64_list.value[0])
+
+ seq_example.context.feature[
+ 'image/context_feature_length'].int64_list.value.append(
+ feature_length)
+ seq_example.context.feature[
+ 'image/context_features'].float_list.value.extend(
+ context_features)
+ if self._keep_context_features_image_id_list:
+ seq_example.context.feature[
+ 'image/context_features_image_id_list'].bytes_list.value.extend(
+ context_features_image_id_list)
+
+ encoded_image_list = seq_example.feature_lists.feature_list[
+ 'image/encoded']
+ timestamps_list = seq_example.feature_lists.feature_list[
+ 'image/timestamp']
+ context_features_idx_list = seq_example.feature_lists.feature_list[
+ 'image/context_features_idx']
+ date_captured_list = seq_example.feature_lists.feature_list[
+ 'image/date_captured']
+ unix_time_list = seq_example.feature_lists.feature_list[
+ 'image/unix_time']
+ location_list = seq_example.feature_lists.feature_list['image/location']
+ image_ids_list = seq_example.feature_lists.feature_list[
+ 'image/source_id']
+ gt_xmin_list = seq_example.feature_lists.feature_list[
+ 'region/bbox/xmin']
+ gt_xmax_list = seq_example.feature_lists.feature_list[
+ 'region/bbox/xmax']
+ gt_ymin_list = seq_example.feature_lists.feature_list[
+ 'region/bbox/ymin']
+ gt_ymax_list = seq_example.feature_lists.feature_list[
+ 'region/bbox/ymax']
+ gt_type_list = seq_example.feature_lists.feature_list[
+ 'region/label/index']
+ gt_type_string_list = seq_example.feature_lists.feature_list[
+ 'region/label/string']
+ gt_is_annotated_list = seq_example.feature_lists.feature_list[
+ 'region/is_annotated']
+
+ for idx, example in enumerate(clip_list):
+
+ encoded_image = encoded_image_list.feature.add()
+ encoded_image.bytes_list.value.extend(
+ example.features.feature['image/encoded'].bytes_list.value)
+
+ image_id = image_ids_list.feature.add()
+ image_id.bytes_list.value.append(
+ example.features.feature['image/source_id'].bytes_list.value[0])
+
+ timestamp = timestamps_list.feature.add()
+        # The timestamp is currently the frame's index within the clip.
+ timestamp.int64_list.value.extend([idx])
+
+ context_features_idx = context_features_idx_list.feature.add()
+ context_features_idx.int64_list.value.extend(
+ example.features.feature['context_features_idx'].int64_list.value)
+
+ date_captured = date_captured_list.feature.add()
+ date_captured.bytes_list.value.extend(
+ example.features.feature['image/date_captured'].bytes_list.value)
+ unix_time = unix_time_list.feature.add()
+ unix_time.float_list.value.extend(
+ example.features.feature['image/unix_time'].float_list.value)
+ location = location_list.feature.add()
+ location.bytes_list.value.extend(
+ example.features.feature['image/location'].bytes_list.value)
+
+ gt_xmin = gt_xmin_list.feature.add()
+ gt_xmax = gt_xmax_list.feature.add()
+ gt_ymin = gt_ymin_list.feature.add()
+ gt_ymax = gt_ymax_list.feature.add()
+ gt_type = gt_type_list.feature.add()
+ gt_type_str = gt_type_string_list.feature.add()
+
+ gt_is_annotated = gt_is_annotated_list.feature.add()
+ gt_is_annotated.int64_list.value.append(1)
+
+ gt_xmin.float_list.value.extend(
+ example.features.feature[
+ 'image/object/bbox/xmin'].float_list.value)
+ gt_xmax.float_list.value.extend(
+ example.features.feature[
+ 'image/object/bbox/xmax'].float_list.value)
+ gt_ymin.float_list.value.extend(
+ example.features.feature[
+ 'image/object/bbox/ymin'].float_list.value)
+ gt_ymax.float_list.value.extend(
+ example.features.feature[
+ 'image/object/bbox/ymax'].float_list.value)
+
+ gt_type.int64_list.value.extend(
+ example.features.feature[
+ 'image/object/class/label'].int64_list.value)
+ gt_type_str.bytes_list.value.extend(
+ example.features.feature[
+ 'image/object/class/text'].bytes_list.value)
+
+ self._num_examples_processed.inc(1)
+ list_of_examples.append(seq_example)
+
+ elif self._output_type == 'tf_example':
+
+ for example in example_list:
+ im_id = example.features.feature['image/source_id'].bytes_list.value[0]
+
+ if self._add_context_features:
+ example.features.feature[
+ 'image/context_features'].float_list.value.extend(
+ context_features)
+ example.features.feature[
+ 'image/context_feature_length'].int64_list.value.append(
+ feature_length)
+
+ if self._keep_context_features_image_id_list:
+ example.features.feature[
+ 'image/context_features_image_id_list'].bytes_list.value.extend(
+ context_features_image_id_list)
+
+ self._num_examples_processed.inc(1)
+ list_of_examples.append(example)
+
+ return list_of_examples
+
+
+def construct_pipeline(input_tfrecord,
+ output_tfrecord,
+ sequence_key,
+ time_horizon=None,
+ subsample_context_features_rate=0,
+ reduce_image_size=True,
+ max_image_dimension=1024,
+ add_context_features=True,
+ sorted_image_ids=True,
+ image_ids_to_keep='All',
+ keep_context_features_image_id_list=False,
+ keep_only_positives=False,
+ context_features_score_threshold=0.7,
+ keep_only_positives_gt=False,
+ max_num_elements_in_context_features=5000,
+ num_shards=0,
+ output_type='tf_example',
+ max_clip_length=None):
+ """Returns a beam pipeline to run object detection inference.
+
+ Args:
+    input_tfrecord: A TFRecord of tf.train.Example protos containing images.
+    output_tfrecord: A TFRecord of tf.train.Example protos that contain the
+      images from the input TFRecord with added context features.
+ sequence_key: A feature name to use as a key for grouping sequences.
+    time_horizon: What length of time to use to partition the data when
+      building the memory banks. Options: `year`, `month`, `week`, `day`,
+      `hour`, `minute`, None.
+ subsample_context_features_rate: What rate to subsample images for the
+ contextual memory bank.
+ reduce_image_size: Whether to reduce the size of the stored images.
+ max_image_dimension: The maximum image dimension to use for resizing.
+ add_context_features: Whether to keep and store the contextual memory bank.
+ sorted_image_ids: Whether the image ids are sortable, and can be used as
+ datetime tie-breakers when building memory banks.
+ image_ids_to_keep: A list of image ids to save, to use to build data subsets
+ for evaluation.
+ keep_context_features_image_id_list: Whether to save an ordered list of the
+ ids of the images in the contextual memory bank.
+ keep_only_positives: Whether to only keep high scoring
+ (>context_features_score_threshold) features in the contextual memory
+ bank.
+ context_features_score_threshold: What threshold to use for keeping
+ features.
+ keep_only_positives_gt: Whether to only keep features from images that
+ contain objects based on the ground truth (for training).
+    max_num_elements_in_context_features: The maximum number of elements in
+      the memory bank.
+    num_shards: The number of output shards.
+    output_type: What type of output, `tf_example` or `tf_sequence_example`.
+    max_clip_length: The maximum length of a sequence example, before
+      splitting into multiple sequence examples.
+
+  Returns:
+    pipeline: A Beam pipeline.
+  """
+ def pipeline(root):
+ if output_type == 'tf_example':
+ coder = beam.coders.ProtoCoder(tf.train.Example)
+ elif output_type == 'tf_sequence_example':
+ coder = beam.coders.ProtoCoder(tf.train.SequenceExample)
+ else:
+ raise ValueError('Unsupported output type.')
+ input_collection = (
+ root | 'ReadInputTFRecord' >> beam.io.tfrecordio.ReadFromTFRecord(
+ input_tfrecord,
+ coder=beam.coders.BytesCoder()))
+ rekey_collection = input_collection | 'RekeyExamples' >> beam.ParDo(
+ ReKeyDataFn(sequence_key, time_horizon,
+ reduce_image_size, max_image_dimension))
+ grouped_collection = (
+ rekey_collection | 'GroupBySequenceKey' >> beam.GroupByKey())
+ grouped_collection = (
+ grouped_collection | 'ReshuffleGroups' >> beam.Reshuffle())
+ ordered_collection = (
+ grouped_collection | 'OrderByFrameNumber' >> beam.ParDo(
+ SortGroupedDataFn(sequence_key, sorted_image_ids,
+ max_num_elements_in_context_features)))
+ ordered_collection = (
+ ordered_collection | 'ReshuffleSortedGroups' >> beam.Reshuffle())
+ output_collection = (
+ ordered_collection | 'AddContextToExamples' >> beam.ParDo(
+ GenerateContextFn(
+ sequence_key, add_context_features, image_ids_to_keep,
+ keep_context_features_image_id_list=(
+ keep_context_features_image_id_list),
+ subsample_context_features_rate=subsample_context_features_rate,
+ keep_only_positives=keep_only_positives,
+ keep_only_positives_gt=keep_only_positives_gt,
+ context_features_score_threshold=(
+ context_features_score_threshold),
+ max_num_elements_in_context_features=(
+ max_num_elements_in_context_features),
+ output_type=output_type,
+ max_clip_length=max_clip_length)))
+
+ output_collection = (
+ output_collection | 'ReshuffleExamples' >> beam.Reshuffle())
+ _ = output_collection | 'WritetoDisk' >> beam.io.tfrecordio.WriteToTFRecord(
+ output_tfrecord,
+ num_shards=num_shards,
+ coder=coder)
+ return pipeline
+
+
+def main(_):
+ """Runs the Beam pipeline that builds context features.
+
+ Args:
+ _: unused
+ """
+ # must create before flags are used
+ runner = runners.DirectRunner()
+
+ dirname = os.path.dirname(FLAGS.output_tfrecord)
+ tf.io.gfile.makedirs(dirname)
+ runner.run(
+ construct_pipeline(FLAGS.input_tfrecord,
+ FLAGS.output_tfrecord,
+ FLAGS.sequence_key,
+ FLAGS.time_horizon,
+ FLAGS.subsample_context_features_rate,
+ FLAGS.reduce_image_size,
+ FLAGS.max_image_dimension,
+ FLAGS.add_context_features,
+ FLAGS.sorted_image_ids,
+ FLAGS.image_ids_to_keep,
+ FLAGS.keep_context_features_image_id_list,
+ FLAGS.keep_only_positives,
+ FLAGS.context_features_score_threshold,
+ FLAGS.keep_only_positives_gt,
+ FLAGS.max_num_elements_in_context_features,
+ FLAGS.num_shards,
+ FLAGS.output_type,
+ FLAGS.max_clip_length))
+
+
+if __name__ == '__main__':
+ flags.mark_flags_as_required([
+ 'input_tfrecord',
+ 'output_tfrecord'
+ ])
+ app.run(main)
diff --git a/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples_tf1_test.py b/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples_tf1_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..0f10fa7764965f63fdc74ed39861aefa0fc266d5
--- /dev/null
+++ b/research/object_detection/dataset_tools/context_rcnn/add_context_to_examples_tf1_test.py
@@ -0,0 +1,384 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Tests for add_context_to_examples."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+import contextlib
+import datetime
+import os
+import tempfile
+import unittest
+import numpy as np
+import six
+import tensorflow.compat.v1 as tf
+
+from object_detection.dataset_tools.context_rcnn import add_context_to_examples
+from object_detection.utils import tf_version
+from apache_beam import runners
+
+
+@contextlib.contextmanager
+def InMemoryTFRecord(entries):
+ temp = tempfile.NamedTemporaryFile(delete=False)
+ filename = temp.name
+ try:
+ with tf.python_io.TFRecordWriter(filename) as writer:
+ for value in entries:
+ writer.write(value)
+ yield filename
+ finally:
+ os.unlink(temp.name)
+
+
+def BytesFeature(value):
+ return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
+
+
+def BytesListFeature(value):
+ return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))
+
+
+def Int64Feature(value):
+ return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
+
+
+def Int64ListFeature(value):
+ return tf.train.Feature(int64_list=tf.train.Int64List(value=value))
+
+
+def FloatListFeature(value):
+ return tf.train.Feature(float_list=tf.train.FloatList(value=value))
+
+
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
+class GenerateContextDataTest(tf.test.TestCase):
+
+ def _create_first_tf_example(self):
+ with self.test_session():
+ encoded_image = tf.image.encode_jpeg(
+ tf.constant(np.ones((4, 4, 3)).astype(np.uint8))).eval()
+
+ example = tf.train.Example(features=tf.train.Features(feature={
+ 'image/encoded': BytesFeature(encoded_image),
+ 'image/source_id': BytesFeature(six.ensure_binary('image_id_1')),
+ 'image/height': Int64Feature(4),
+ 'image/width': Int64Feature(4),
+ 'image/object/class/label': Int64ListFeature([5, 5]),
+ 'image/object/class/text': BytesListFeature([six.ensure_binary('hyena'),
+ six.ensure_binary('hyena')
+ ]),
+ 'image/object/bbox/xmin': FloatListFeature([0.0, 0.1]),
+ 'image/object/bbox/xmax': FloatListFeature([0.2, 0.3]),
+ 'image/object/bbox/ymin': FloatListFeature([0.4, 0.5]),
+ 'image/object/bbox/ymax': FloatListFeature([0.6, 0.7]),
+ 'image/seq_id': BytesFeature(six.ensure_binary('01')),
+ 'image/seq_num_frames': Int64Feature(2),
+ 'image/seq_frame_num': Int64Feature(0),
+ 'image/date_captured': BytesFeature(
+ six.ensure_binary(str(datetime.datetime(2020, 1, 1, 1, 0, 0)))),
+ 'image/embedding': FloatListFeature([0.1, 0.2, 0.3]),
+ 'image/embedding_score': FloatListFeature([0.9]),
+ 'image/embedding_length': Int64Feature(3)
+
+ }))
+
+ return example.SerializeToString()
+
+ def _create_second_tf_example(self):
+ with self.test_session():
+ encoded_image = tf.image.encode_jpeg(
+ tf.constant(np.ones((4, 4, 3)).astype(np.uint8))).eval()
+
+ example = tf.train.Example(features=tf.train.Features(feature={
+ 'image/encoded': BytesFeature(encoded_image),
+ 'image/source_id': BytesFeature(six.ensure_binary('image_id_2')),
+ 'image/height': Int64Feature(4),
+ 'image/width': Int64Feature(4),
+ 'image/object/class/label': Int64ListFeature([5]),
+ 'image/object/class/text': BytesListFeature([six.ensure_binary('hyena')
+ ]),
+ 'image/object/bbox/xmin': FloatListFeature([0.0]),
+ 'image/object/bbox/xmax': FloatListFeature([0.1]),
+ 'image/object/bbox/ymin': FloatListFeature([0.2]),
+ 'image/object/bbox/ymax': FloatListFeature([0.3]),
+ 'image/seq_id': BytesFeature(six.ensure_binary('01')),
+ 'image/seq_num_frames': Int64Feature(2),
+ 'image/seq_frame_num': Int64Feature(1),
+ 'image/date_captured': BytesFeature(
+ six.ensure_binary(str(datetime.datetime(2020, 1, 1, 1, 1, 0)))),
+ 'image/embedding': FloatListFeature([0.4, 0.5, 0.6]),
+ 'image/embedding_score': FloatListFeature([0.9]),
+ 'image/embedding_length': Int64Feature(3)
+ }))
+
+ return example.SerializeToString()
+
+ def assert_expected_examples(self, tf_example_list):
+ self.assertAllEqual(
+ {tf_example.features.feature['image/source_id'].bytes_list.value[0]
+ for tf_example in tf_example_list},
+ {six.ensure_binary('image_id_1'), six.ensure_binary('image_id_2')})
+ self.assertAllClose(
+ tf_example_list[0].features.feature[
+ 'image/context_features'].float_list.value,
+ [0.1, 0.2, 0.3, 0.4, 0.5, 0.6])
+ self.assertAllClose(
+ tf_example_list[1].features.feature[
+ 'image/context_features'].float_list.value,
+ [0.1, 0.2, 0.3, 0.4, 0.5, 0.6])
+
+ def assert_expected_sequence_example(self, tf_sequence_example_list):
+ tf_sequence_example = tf_sequence_example_list[0]
+ num_frames = 2
+
+ self.assertAllEqual(
+ tf_sequence_example.context.feature[
+ 'clip/media_id'].bytes_list.value[0], six.ensure_binary(
+ '01_0'))
+ self.assertAllClose(
+ tf_sequence_example.context.feature[
+ 'image/context_features'].float_list.value,
+ [0.1, 0.2, 0.3, 0.4, 0.5, 0.6])
+
+ seq_feature_dict = tf_sequence_example.feature_lists.feature_list
+
+ self.assertLen(
+ seq_feature_dict['image/encoded'].feature[:],
+ num_frames)
+ actual_timestamps = [
+ feature.int64_list.value[0] for feature
+ in seq_feature_dict['image/timestamp'].feature]
+ timestamps = [0, 1]
+ self.assertAllEqual(timestamps, actual_timestamps)
+
+ # First image.
+ self.assertAllClose(
+ [0.4, 0.5],
+ seq_feature_dict['region/bbox/ymin'].feature[0].float_list.value[:])
+ self.assertAllClose(
+ [0.0, 0.1],
+ seq_feature_dict['region/bbox/xmin'].feature[0].float_list.value[:])
+ self.assertAllClose(
+ [0.6, 0.7],
+ seq_feature_dict['region/bbox/ymax'].feature[0].float_list.value[:])
+ self.assertAllClose(
+ [0.2, 0.3],
+ seq_feature_dict['region/bbox/xmax'].feature[0].float_list.value[:])
+ self.assertAllEqual(
+ [six.ensure_binary('hyena'), six.ensure_binary('hyena')],
+ seq_feature_dict['region/label/string'].feature[0].bytes_list.value[:])
+
+ # Second example.
+ self.assertAllClose(
+ [0.2],
+ seq_feature_dict['region/bbox/ymin'].feature[1].float_list.value[:])
+ self.assertAllClose(
+ [0.0],
+ seq_feature_dict['region/bbox/xmin'].feature[1].float_list.value[:])
+ self.assertAllClose(
+ [0.3],
+ seq_feature_dict['region/bbox/ymax'].feature[1].float_list.value[:])
+ self.assertAllClose(
+ [0.1],
+ seq_feature_dict['region/bbox/xmax'].feature[1].float_list.value[:])
+ self.assertAllEqual(
+ [six.ensure_binary('hyena')],
+ seq_feature_dict['region/label/string'].feature[1].bytes_list.value[:])
+
+ def assert_expected_key(self, key):
+ self.assertAllEqual(key, b'01')
+
+ def assert_sorted(self, example_collection):
+ example_list = list(example_collection)
+ counter = 0
+ for example in example_list:
+ frame_num = example.features.feature[
+ 'image/seq_frame_num'].int64_list.value[0]
+ self.assertGreaterEqual(frame_num, counter)
+ counter = frame_num
+
+ def assert_context(self, example_collection):
+ example_list = list(example_collection)
+ for example in example_list:
+ context = example.features.feature[
+ 'image/context_features'].float_list.value
+ self.assertAllClose([0.1, 0.2, 0.3, 0.4, 0.5, 0.6], context)
+
+ def assert_resized(self, example):
+ width = example.features.feature['image/width'].int64_list.value[0]
+ self.assertAllEqual(width, 2)
+ height = example.features.feature['image/height'].int64_list.value[0]
+ self.assertAllEqual(height, 2)
+
+ def assert_size(self, example):
+ width = example.features.feature['image/width'].int64_list.value[0]
+ self.assertAllEqual(width, 4)
+ height = example.features.feature['image/height'].int64_list.value[0]
+ self.assertAllEqual(height, 4)
+
+ def test_sliding_window(self):
+ example_list = ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+ max_clip_length = 3
+ stride_length = 3
+ out_list = [list(i) for i in add_context_to_examples.get_sliding_window(
+ example_list, max_clip_length, stride_length)]
+ self.assertAllEqual(out_list, [['a', 'b', 'c'],
+ ['d', 'e', 'f'],
+ ['g']])
+
+ def test_rekey_data_fn(self):
+ sequence_key = 'image/seq_id'
+ time_horizon = None
+ reduce_image_size = False
+ max_dim = None
+
+ rekey_fn = add_context_to_examples.ReKeyDataFn(
+ sequence_key, time_horizon,
+ reduce_image_size, max_dim)
+ output = rekey_fn.process(self._create_first_tf_example())
+
+ self.assert_expected_key(output[0][0])
+ self.assert_size(output[0][1])
+
+ def test_rekey_data_fn_w_resize(self):
+ sequence_key = 'image/seq_id'
+ time_horizon = None
+ reduce_image_size = True
+ max_dim = 2
+
+ rekey_fn = add_context_to_examples.ReKeyDataFn(
+ sequence_key, time_horizon,
+ reduce_image_size, max_dim)
+ output = rekey_fn.process(self._create_first_tf_example())
+
+ self.assert_expected_key(output[0][0])
+ self.assert_resized(output[0][1])
+
+ def test_sort_fn(self):
+ sequence_key = 'image/seq_id'
+ sorted_image_ids = False
+ max_num_elements_in_context_features = 10
+ sort_fn = add_context_to_examples.SortGroupedDataFn(
+ sequence_key, sorted_image_ids, max_num_elements_in_context_features)
+ output = sort_fn.process(
+ ('dummy_key', [tf.train.Example.FromString(
+ self._create_second_tf_example()),
+ tf.train.Example.FromString(
+ self._create_first_tf_example())]))
+
+ self.assert_sorted(output[0][1])
+
+ def test_add_context_fn(self):
+ sequence_key = 'image/seq_id'
+ add_context_features = True
+ image_ids_to_keep = 'All'
+ context_fn = add_context_to_examples.GenerateContextFn(
+ sequence_key, add_context_features, image_ids_to_keep)
+ output = context_fn.process(
+ ('dummy_key', [tf.train.Example.FromString(
+ self._create_first_tf_example()),
+ tf.train.Example.FromString(
+ self._create_second_tf_example())]))
+
+ self.assertEqual(len(output), 2)
+ self.assert_context(output)
+
+ def test_add_context_fn_output_sequence_example(self):
+ sequence_key = 'image/seq_id'
+ add_context_features = True
+ image_ids_to_keep = 'All'
+ context_fn = add_context_to_examples.GenerateContextFn(
+ sequence_key, add_context_features, image_ids_to_keep,
+ output_type='tf_sequence_example')
+ output = context_fn.process(
+ ('01',
+ [tf.train.Example.FromString(self._create_first_tf_example()),
+ tf.train.Example.FromString(self._create_second_tf_example())]))
+
+ self.assertEqual(len(output), 1)
+ self.assert_expected_sequence_example(output)
+
+ def test_add_context_fn_output_sequence_example_cliplen(self):
+ sequence_key = 'image/seq_id'
+ add_context_features = True
+ image_ids_to_keep = 'All'
+ context_fn = add_context_to_examples.GenerateContextFn(
+ sequence_key, add_context_features, image_ids_to_keep,
+ output_type='tf_sequence_example', max_clip_length=1)
+ output = context_fn.process(
+ ('01',
+ [tf.train.Example.FromString(self._create_first_tf_example()),
+ tf.train.Example.FromString(self._create_second_tf_example())]))
+ self.assertEqual(len(output), 2)
+
+ def test_beam_pipeline(self):
+ with InMemoryTFRecord(
+ [self._create_first_tf_example(),
+ self._create_second_tf_example()]) as input_tfrecord:
+ runner = runners.DirectRunner()
+ temp_dir = tempfile.mkdtemp(dir=os.environ.get('TEST_TMPDIR'))
+ output_tfrecord = os.path.join(temp_dir, 'output_tfrecord')
+ sequence_key = six.ensure_binary('image/seq_id')
+ max_num_elements = 10
+ num_shards = 1
+ pipeline = add_context_to_examples.construct_pipeline(
+ input_tfrecord,
+ output_tfrecord,
+ sequence_key,
+ max_num_elements_in_context_features=max_num_elements,
+ num_shards=num_shards)
+ runner.run(pipeline)
+ filenames = tf.io.gfile.glob(output_tfrecord + '-?????-of-?????')
+ actual_output = []
+ record_iterator = tf.python_io.tf_record_iterator(path=filenames[0])
+ for record in record_iterator:
+ actual_output.append(record)
+ self.assertEqual(len(actual_output), 2)
+ self.assert_expected_examples([tf.train.Example.FromString(
+ tf_example) for tf_example in actual_output])
+
+ def test_beam_pipeline_sequence_example(self):
+ with InMemoryTFRecord(
+ [self._create_first_tf_example(),
+ self._create_second_tf_example()]) as input_tfrecord:
+ runner = runners.DirectRunner()
+ temp_dir = tempfile.mkdtemp(dir=os.environ.get('TEST_TMPDIR'))
+ output_tfrecord = os.path.join(temp_dir, 'output_tfrecord')
+ sequence_key = six.ensure_binary('image/seq_id')
+ max_num_elements = 10
+ num_shards = 1
+ pipeline = add_context_to_examples.construct_pipeline(
+ input_tfrecord,
+ output_tfrecord,
+ sequence_key,
+ max_num_elements_in_context_features=max_num_elements,
+ num_shards=num_shards,
+ output_type='tf_sequence_example')
+ runner.run(pipeline)
+ filenames = tf.io.gfile.glob(output_tfrecord + '-?????-of-?????')
+ actual_output = []
+ record_iterator = tf.python_io.tf_record_iterator(
+ path=filenames[0])
+ for record in record_iterator:
+ actual_output.append(record)
+ self.assertEqual(len(actual_output), 1)
+ self.assert_expected_sequence_example(
+ [tf.train.SequenceExample.FromString(
+ tf_example) for tf_example in actual_output])
+
+if __name__ == '__main__':
+ tf.test.main()
diff --git a/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_main.py b/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_main.py
new file mode 100644
index 0000000000000000000000000000000000000000..106cf5adb94d8d1017a1834de42ab2096d85c67c
--- /dev/null
+++ b/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_main.py
@@ -0,0 +1,324 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+r"""Beam pipeline to create COCO Camera Traps Object Detection TFRecords.
+
+Please note that this tool creates sharded output files.
+
+This tool assumes the input annotations are in the COCO Camera Traps json
+format, specified here:
+https://github.com/Microsoft/CameraTraps/blob/master/data_management/README.md
+
+Example usage:
+
+ python create_cococameratraps_tfexample_main.py \
+ --alsologtostderr \
+ --output_tfrecord_prefix="/path/to/output/tfrecord/location/prefix" \
+ --image_directory="/path/to/image/folder/" \
+ --input_annotations_file="path/to/annotations.json"
+
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import hashlib
+import io
+import json
+import logging
+import os
+from absl import app
+from absl import flags
+import apache_beam as beam
+import numpy as np
+import PIL.Image
+import tensorflow.compat.v1 as tf
+from apache_beam import runners
+from object_detection.utils import dataset_util
+
+flags.DEFINE_string('image_directory', None, 'Directory where images are '
+ 'stored')
+flags.DEFINE_string('output_tfrecord_prefix', None,
+ 'TFRecord containing images in tf.Example format.')
+flags.DEFINE_string('input_annotations_file', None, 'Path to COCO-CameraTraps '
+                    'style annotations file.')
+flags.DEFINE_integer('num_images_per_shard',
+ 200,
+ 'The number of images to be stored in each shard.')
+
+FLAGS = flags.FLAGS
+
+
+class ParseImage(beam.DoFn):
+ """A DoFn that parses a COCO-CameraTraps json and emits TFRecords."""
+
+ def __init__(self, image_directory, images, annotations, categories,
+ keep_bboxes):
+ """Initialization function.
+
+ Args:
+ image_directory: Path to image directory
+ images: list of COCO Camera Traps style image dictionaries
+ annotations: list of COCO Camera Traps style annotation dictionaries
+ categories: list of COCO Camera Traps style category dictionaries
+ keep_bboxes: Whether to keep any bounding boxes that exist in the
+ annotations
+ """
+
+ self._image_directory = image_directory
+ self._image_dict = {im['id']: im for im in images}
+ self._annotation_dict = {im['id']: [] for im in images}
+ self._category_dict = {int(cat['id']): cat for cat in categories}
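+    # Group the annotations by image id for direct lookup in process().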
+ for ann in annotations:
+ self._annotation_dict[ann['image_id']].append(ann)
+ self._images = images
+ self._keep_bboxes = keep_bboxes
+
+ self._num_examples_processed = beam.metrics.Metrics.counter(
+ 'cococameratraps_data_generation', 'num_tf_examples_processed')
+
+ def process(self, image_id):
+ """Builds a tf.Example given an image id.
+
+ Args:
+ image_id: the image id of the associated image
+
+ Returns:
+ List of tf.Examples.
+ """
+
+ image = self._image_dict[image_id]
+ annotations = self._annotation_dict[image_id]
+ image_height = image['height']
+ image_width = image['width']
+ filename = image['file_name']
+ image_id = image['id']
+ image_location_id = image['location']
+
+ image_datetime = str(image['date_captured'])
+
+ image_sequence_id = str(image['seq_id'])
+ image_sequence_num_frames = int(image['seq_num_frames'])
+ image_sequence_frame_num = int(image['frame_num'])
+
+ full_path = os.path.join(self._image_directory, filename)
+
+ try:
+ # Ensure the image exists and is not corrupted
+ with tf.io.gfile.GFile(full_path, 'rb') as fid:
+ encoded_jpg = fid.read()
+ encoded_jpg_io = io.BytesIO(encoded_jpg)
+ image = PIL.Image.open(encoded_jpg_io)
+ # Ensure the image can be read by tf
+ with tf.Graph().as_default():
+ image = tf.image.decode_jpeg(encoded_jpg, channels=3)
+ init_op = tf.initialize_all_tables()
+ with tf.Session() as sess:
+ sess.run(init_op)
+ sess.run(image)
+ except Exception as e: # pylint: disable=broad-except
+ # The image file is missing or corrupt
+ tf.logging.error(str(e))
+ return []
+
+ key = hashlib.sha256(encoded_jpg).hexdigest()
+ feature_dict = {
+ 'image/height':
+ dataset_util.int64_feature(image_height),
+ 'image/width':
+ dataset_util.int64_feature(image_width),
+ 'image/filename':
+ dataset_util.bytes_feature(filename.encode('utf8')),
+ 'image/source_id':
+ dataset_util.bytes_feature(str(image_id).encode('utf8')),
+ 'image/key/sha256':
+ dataset_util.bytes_feature(key.encode('utf8')),
+ 'image/encoded':
+ dataset_util.bytes_feature(encoded_jpg),
+ 'image/format':
+ dataset_util.bytes_feature('jpeg'.encode('utf8')),
+ 'image/location':
+ dataset_util.bytes_feature(str(image_location_id).encode('utf8')),
+ 'image/seq_num_frames':
+ dataset_util.int64_feature(image_sequence_num_frames),
+ 'image/seq_frame_num':
+ dataset_util.int64_feature(image_sequence_frame_num),
+ 'image/seq_id':
+ dataset_util.bytes_feature(image_sequence_id.encode('utf8')),
+ 'image/date_captured':
+ dataset_util.bytes_feature(image_datetime.encode('utf8'))
+ }
+
+ num_annotations_skipped = 0
+ if annotations:
+ xmin = []
+ xmax = []
+ ymin = []
+ ymax = []
+ category_names = []
+ category_ids = []
+ area = []
+
+ for object_annotations in annotations:
+ if 'bbox' in object_annotations and self._keep_bboxes:
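+          # COCO bboxes are [x, y, width, height] in absolute pixel
+          # coordinates; normalize them to [0, 1] relative coordinates.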
+ (x, y, width, height) = tuple(object_annotations['bbox'])
+ if width <= 0 or height <= 0:
+ num_annotations_skipped += 1
+ continue
+ if x + width > image_width or y + height > image_height:
+ num_annotations_skipped += 1
+ continue
+ xmin.append(float(x) / image_width)
+ xmax.append(float(x + width) / image_width)
+ ymin.append(float(y) / image_height)
+ ymax.append(float(y + height) / image_height)
+ if 'area' in object_annotations:
+ area.append(object_annotations['area'])
+ else:
+          # Approximate the area as half the bounding box area:
+          # width * height / 2.
+ area.append(width*height/2.0)
+
+ category_id = int(object_annotations['category_id'])
+ category_ids.append(category_id)
+ category_names.append(
+ self._category_dict[category_id]['name'].encode('utf8'))
+
+ feature_dict.update({
+ 'image/object/bbox/xmin':
+ dataset_util.float_list_feature(xmin),
+ 'image/object/bbox/xmax':
+ dataset_util.float_list_feature(xmax),
+ 'image/object/bbox/ymin':
+ dataset_util.float_list_feature(ymin),
+ 'image/object/bbox/ymax':
+ dataset_util.float_list_feature(ymax),
+ 'image/object/class/text':
+ dataset_util.bytes_list_feature(category_names),
+ 'image/object/class/label':
+ dataset_util.int64_list_feature(category_ids),
+ 'image/object/area':
+ dataset_util.float_list_feature(area),
+ })
+
+ # For classification, add the first category to image/class/label and
+ # image/class/text
+ if not category_ids:
+ feature_dict.update({
+ 'image/class/label':
+ dataset_util.int64_list_feature([0]),
+ 'image/class/text':
+ dataset_util.bytes_list_feature(['empty'.encode('utf8')]),
+ })
+ else:
+ feature_dict.update({
+ 'image/class/label':
+ dataset_util.int64_list_feature([category_ids[0]]),
+ 'image/class/text':
+ dataset_util.bytes_list_feature([category_names[0]]),
+ })
+
+ else:
+ # Add empty class if there are no annotations
+ feature_dict.update({
+ 'image/class/label':
+ dataset_util.int64_list_feature([0]),
+ 'image/class/text':
+ dataset_util.bytes_list_feature(['empty'.encode('utf8')]),
+ })
+
+ example = tf.train.Example(features=tf.train.Features(feature=feature_dict))
+ self._num_examples_processed.inc(1)
+
+    return [example]
+
+
+def _load_json_data(data_file):
+ with tf.io.gfile.GFile(data_file, 'r') as fid:
+ data_dict = json.load(fid)
+ return data_dict
+
+
+def create_pipeline(image_directory,
+ input_annotations_file,
+ output_tfrecord_prefix=None,
+ num_images_per_shard=200,
+ keep_bboxes=True):
+ """Creates a beam pipeline for producing a COCO-CameraTraps Image dataset.
+
+ Args:
+ image_directory: Path to image directory
+ input_annotations_file: Path to a coco-cameratraps annotation file
+ output_tfrecord_prefix: Absolute path for tfrecord outputs. Final files will
+ be named {output_tfrecord_prefix}@N.
+ num_images_per_shard: The number of images to store in each shard
+ keep_bboxes: Whether to keep any bounding boxes that exist in the json file
+
+ Returns:
+ A Beam pipeline.
+ """
+
+ logging.info('Reading data from COCO-CameraTraps Dataset.')
+
+ data = _load_json_data(input_annotations_file)
+
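+  # Use as many shards as needed to hold num_images_per_shard images per
+  # shard, rounding up.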
+ num_shards = int(np.ceil(float(len(data['images']))/num_images_per_shard))
+
+ def pipeline(root):
+ """Builds beam pipeline."""
+
+ image_examples = (
+ root
+ | ('CreateCollections') >> beam.Create(
+ [im['id'] for im in data['images']])
+ | ('ParseImage') >> beam.ParDo(ParseImage(
+ image_directory, data['images'], data['annotations'],
+ data['categories'], keep_bboxes=keep_bboxes)))
+ _ = (image_examples
+ | ('Reshuffle') >> beam.Reshuffle()
+ | ('WriteTfImageExample') >> beam.io.tfrecordio.WriteToTFRecord(
+ output_tfrecord_prefix,
+ num_shards=num_shards,
+ coder=beam.coders.ProtoCoder(tf.train.Example)))
+
+ return pipeline
+
+
+def main(_):
+ """Runs the Beam pipeline that performs inference.
+
+ Args:
+ _: unused
+ """
+
+ # must create before flags are used
+ runner = runners.DirectRunner()
+
+ dirname = os.path.dirname(FLAGS.output_tfrecord_prefix)
+ tf.io.gfile.makedirs(dirname)
+
+ runner.run(
+ create_pipeline(
+ image_directory=FLAGS.image_directory,
+ input_annotations_file=FLAGS.input_annotations_file,
+ output_tfrecord_prefix=FLAGS.output_tfrecord_prefix,
+ num_images_per_shard=FLAGS.num_images_per_shard))
+
+
+if __name__ == '__main__':
+ flags.mark_flags_as_required([
+ 'image_directory',
+ 'input_annotations_file',
+ 'output_tfrecord_prefix'
+ ])
+ app.run(main)
diff --git a/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_tf1_test.py b/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_tf1_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f3569e1384857641de7c767ae76f0d9023d7291
--- /dev/null
+++ b/research/object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_tf1_test.py
@@ -0,0 +1,201 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Tests for create_cococameratraps_tfexample_main."""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+import datetime
+import json
+import os
+import tempfile
+import unittest
+import numpy as np
+
+from PIL import Image
+import tensorflow.compat.v1 as tf
+from object_detection.dataset_tools.context_rcnn import create_cococameratraps_tfexample_main
+from object_detection.utils import tf_version
+from apache_beam import runners
+
+
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
+class CreateCOCOCameraTrapsTfexampleTest(tf.test.TestCase):
+
+ IMAGE_HEIGHT = 360
+ IMAGE_WIDTH = 480
+
+ def _write_random_images_to_directory(self, directory, num_frames):
+ for frame_num in range(num_frames):
+ img = np.random.randint(0, high=256,
+ size=(self.IMAGE_HEIGHT, self.IMAGE_WIDTH, 3),
+ dtype=np.uint8)
+ pil_image = Image.fromarray(img)
+ fname = 'im_' + str(frame_num) + '.jpg'
+ pil_image.save(os.path.join(directory, fname), 'JPEG')
+
+ def _create_json_file(self, directory, num_frames, keep_bboxes=False):
+ json_dict = {'images': [], 'annotations': []}
+ json_dict['categories'] = [{'id': 0, 'name': 'empty'},
+ {'id': 1, 'name': 'animal'}]
+ for idx in range(num_frames):
+ im = {'id': 'im_' + str(idx),
+ 'file_name': 'im_' + str(idx) + '.jpg',
+ 'height': self.IMAGE_HEIGHT,
+ 'width': self.IMAGE_WIDTH,
+ 'seq_id': 'seq_1',
+ 'seq_num_frames': num_frames,
+ 'frame_num': idx,
+ 'location': 'loc_' + str(idx),
+ 'date_captured': str(datetime.datetime.now())
+ }
+ json_dict['images'].append(im)
+ ann = {'id': 'ann' + str(idx),
+ 'image_id': 'im_' + str(idx),
+ 'category_id': 1,
+ }
+ if keep_bboxes:
+ ann['bbox'] = [0.0 * self.IMAGE_WIDTH,
+ 0.1 * self.IMAGE_HEIGHT,
+ 0.5 * self.IMAGE_WIDTH,
+ 0.5 * self.IMAGE_HEIGHT]
+ json_dict['annotations'].append(ann)
+
+ json_path = os.path.join(directory, 'test_file.json')
+ with tf.io.gfile.GFile(json_path, 'w') as f:
+ json.dump(json_dict, f)
+ return json_path
+
+ def assert_expected_example_bbox(self, example):
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/ymin'].float_list.value,
+ [0.1])
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/xmin'].float_list.value,
+ [0.0])
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/ymax'].float_list.value,
+ [0.6])
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/xmax'].float_list.value,
+ [0.5])
+ self.assertAllClose(
+ example.features.feature['image/object/class/label']
+ .int64_list.value, [1])
+ self.assertAllEqual(
+ example.features.feature['image/object/class/text']
+ .bytes_list.value, [b'animal'])
+ self.assertAllClose(
+ example.features.feature['image/class/label']
+ .int64_list.value, [1])
+ self.assertAllEqual(
+ example.features.feature['image/class/text']
+ .bytes_list.value, [b'animal'])
+
+ # Check other essential attributes.
+ self.assertAllEqual(
+ example.features.feature['image/height'].int64_list.value,
+ [self.IMAGE_HEIGHT])
+ self.assertAllEqual(
+ example.features.feature['image/width'].int64_list.value,
+ [self.IMAGE_WIDTH])
+ self.assertAllEqual(
+ example.features.feature['image/source_id'].bytes_list.value,
+ [b'im_0'])
+ self.assertTrue(
+ example.features.feature['image/encoded'].bytes_list.value)
+
+ def assert_expected_example(self, example):
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/ymin'].float_list.value,
+ [])
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/xmin'].float_list.value,
+ [])
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/ymax'].float_list.value,
+ [])
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/xmax'].float_list.value,
+ [])
+ self.assertAllClose(
+ example.features.feature['image/object/class/label']
+ .int64_list.value, [1])
+ self.assertAllEqual(
+ example.features.feature['image/object/class/text']
+ .bytes_list.value, [b'animal'])
+ self.assertAllClose(
+ example.features.feature['image/class/label']
+ .int64_list.value, [1])
+ self.assertAllEqual(
+ example.features.feature['image/class/text']
+ .bytes_list.value, [b'animal'])
+
+ # Check other essential attributes.
+ self.assertAllEqual(
+ example.features.feature['image/height'].int64_list.value,
+ [self.IMAGE_HEIGHT])
+ self.assertAllEqual(
+ example.features.feature['image/width'].int64_list.value,
+ [self.IMAGE_WIDTH])
+ self.assertAllEqual(
+ example.features.feature['image/source_id'].bytes_list.value,
+ [b'im_0'])
+ self.assertTrue(
+ example.features.feature['image/encoded'].bytes_list.value)
+
+ def test_beam_pipeline(self):
+ runner = runners.DirectRunner()
+ num_frames = 1
+ temp_dir = tempfile.mkdtemp(dir=os.environ.get('TEST_TMPDIR'))
+ json_path = self._create_json_file(temp_dir, num_frames)
+ output_tfrecord = temp_dir+'/output'
+ self._write_random_images_to_directory(temp_dir, num_frames)
+ pipeline = create_cococameratraps_tfexample_main.create_pipeline(
+ temp_dir, json_path,
+ output_tfrecord_prefix=output_tfrecord)
+ runner.run(pipeline)
+ filenames = tf.io.gfile.glob(output_tfrecord + '-?????-of-?????')
+ actual_output = []
+ record_iterator = tf.python_io.tf_record_iterator(path=filenames[0])
+ for record in record_iterator:
+ actual_output.append(record)
+ self.assertEqual(len(actual_output), num_frames)
+ self.assert_expected_example(tf.train.Example.FromString(
+ actual_output[0]))
+
+ def test_beam_pipeline_bbox(self):
+ runner = runners.DirectRunner()
+ num_frames = 1
+ temp_dir = tempfile.mkdtemp(dir=os.environ.get('TEST_TMPDIR'))
+ json_path = self._create_json_file(temp_dir, num_frames, keep_bboxes=True)
+ output_tfrecord = temp_dir+'/output'
+ self._write_random_images_to_directory(temp_dir, num_frames)
+ pipeline = create_cococameratraps_tfexample_main.create_pipeline(
+ temp_dir, json_path,
+ output_tfrecord_prefix=output_tfrecord,
+ keep_bboxes=True)
+ runner.run(pipeline)
+ filenames = tf.io.gfile.glob(output_tfrecord+'-?????-of-?????')
+ actual_output = []
+ record_iterator = tf.python_io.tf_record_iterator(path=filenames[0])
+ for record in record_iterator:
+ actual_output.append(record)
+ self.assertEqual(len(actual_output), num_frames)
+ self.assert_expected_example_bbox(tf.train.Example.FromString(
+ actual_output[0]))
+
+
+if __name__ == '__main__':
+ tf.test.main()
diff --git a/research/object_detection/dataset_tools/context_rcnn/generate_detection_data.py b/research/object_detection/dataset_tools/context_rcnn/generate_detection_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..95c16c1358a15ece03aaa9e80353e1ebf2c17166
--- /dev/null
+++ b/research/object_detection/dataset_tools/context_rcnn/generate_detection_data.py
@@ -0,0 +1,262 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+r"""A Beam job to generate detection data for camera trap images.
+
+This tool runs inference with an exported Object Detection model in
+`saved_model` format and produces raw detection boxes for images in
+tf.Examples, under the assumption that the bounding box class label will match
+the image-level class label in the tf.Example.
+
+Steps to generate a detection dataset:
+1. Use object_detection/export_inference_graph.py to get a `saved_model` for
+ inference. The input node must accept a tf.Example proto.
+2. Run this tool with `saved_model` from step 1 and a TFRecord of tf.Example
+ protos containing images for inference.
+
+Example Usage:
+--------------
+python tensorflow_models/object_detection/export_inference_graph.py \
+ --alsologtostderr \
+ --input_type tf_example \
+ --pipeline_config_path path/to/detection_model.config \
+ --trained_checkpoint_prefix path/to/model.ckpt \
+ --output_directory path/to/exported_model_directory
+
+python generate_detection_data.py \
+ --alsologtostderr \
+ --input_tfrecord path/to/input_tfrecord@X \
+ --output_tfrecord path/to/output_tfrecord@X \
+ --model_dir path/to/exported_model_directory/saved_model
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import os
+import threading
+from absl import app
+from absl import flags
+import apache_beam as beam
+import tensorflow.compat.v1 as tf
+from apache_beam import runners
+
+
+flags.DEFINE_string('detection_input_tfrecord', None, 'TFRecord containing '
+ 'images in tf.Example format for object detection.')
+flags.DEFINE_string('detection_output_tfrecord', None,
+ 'TFRecord containing detections in tf.Example format.')
+flags.DEFINE_string('detection_model_dir', None,
+                    'Path to directory containing an object detection '
+                    'SavedModel.')
+flags.DEFINE_float('confidence_threshold', 0.9,
+ 'Min confidence to keep bounding boxes')
+flags.DEFINE_integer('num_shards', 0, 'Number of output shards.')
+
+FLAGS = flags.FLAGS
+
+
+class GenerateDetectionDataFn(beam.DoFn):
+ """Generates detection data for camera trap images.
+
+ This Beam DoFn performs inference with an object detection `saved_model` and
+ produces detection boxes for camera trap data, matched to the
+ object class.
+ """
+ session_lock = threading.Lock()
+
+ def __init__(self, model_dir, confidence_threshold):
+ """Initialization function.
+
+ Args:
+      model_dir: A directory containing the SavedModel to use for inference.
+      confidence_threshold: The confidence threshold for boxes to keep.
+ """
+ self._model_dir = model_dir
+ self._confidence_threshold = confidence_threshold
+ self._session = None
+ self._num_examples_processed = beam.metrics.Metrics.counter(
+ 'detection_data_generation', 'num_tf_examples_processed')
+
+ def start_bundle(self):
+ self._load_inference_model()
+
+ def _load_inference_model(self):
+ # Because initialization of the tf.Session is expensive we share
+ # one instance across all threads in the worker. This is possible since
+ # tf.Session.run() is thread safe.
+ with self.session_lock:
+ if self._session is None:
+ graph = tf.Graph()
+ self._session = tf.Session(graph=graph)
+ with graph.as_default():
+ meta_graph = tf.saved_model.loader.load(
+ self._session, [tf.saved_model.tag_constants.SERVING],
+ self._model_dir)
+ signature = meta_graph.signature_def['serving_default']
+ input_tensor_name = signature.inputs['inputs'].name
+ self._input = graph.get_tensor_by_name(input_tensor_name)
+ self._boxes_node = graph.get_tensor_by_name(
+ signature.outputs['detection_boxes'].name)
+ self._scores_node = graph.get_tensor_by_name(
+ signature.outputs['detection_scores'].name)
+ self._num_detections_node = graph.get_tensor_by_name(
+ signature.outputs['num_detections'].name)
+
+ def process(self, tfrecord_entry):
+ return self._run_inference_and_generate_detections(tfrecord_entry)
+
+ def _run_inference_and_generate_detections(self, tfrecord_entry):
+ input_example = tf.train.Example.FromString(tfrecord_entry)
+ if input_example.features.feature[
+ 'image/object/bbox/ymin'].float_list.value:
+ # There are already ground truth boxes for this image, just keep them.
+ return [input_example]
+
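+    # The SavedModel is expected to have been exported with input_type
+    # `tf_example` (see the module docstring), so the input placeholder
+    # consumes serialized tf.Example protos.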
+ detection_boxes, detection_scores, num_detections = self._session.run(
+ [self._boxes_node, self._scores_node, self._num_detections_node],
+ feed_dict={self._input: [tfrecord_entry]})
+
+ example = tf.train.Example()
+
+ num_detections = int(num_detections[0])
+
+ image_class_labels = input_example.features.feature[
+ 'image/object/class/label'].int64_list.value
+
+ image_class_texts = input_example.features.feature[
+ 'image/object/class/text'].bytes_list.value
+
+ # Ignore any images with multiple classes,
+ # we can't match the class to the box.
+ if len(image_class_labels) > 1:
+ return []
+
+ # Don't add boxes for images already labeled empty (for now)
+ if len(image_class_labels) == 1:
+ # Add boxes over confidence threshold.
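+      # detection_boxes are [ymin, xmin, ymax, xmax] in normalized
+      # coordinates, following the Object Detection API convention.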
+ for idx, score in enumerate(detection_scores[0]):
+ if score >= self._confidence_threshold and idx < num_detections:
+ example.features.feature[
+ 'image/object/bbox/ymin'].float_list.value.extend([
+ detection_boxes[0, idx, 0]])
+ example.features.feature[
+ 'image/object/bbox/xmin'].float_list.value.extend([
+ detection_boxes[0, idx, 1]])
+ example.features.feature[
+ 'image/object/bbox/ymax'].float_list.value.extend([
+ detection_boxes[0, idx, 2]])
+ example.features.feature[
+ 'image/object/bbox/xmax'].float_list.value.extend([
+ detection_boxes[0, idx, 3]])
+
+ # Add box scores and class texts and labels.
+ example.features.feature[
+ 'image/object/class/score'].float_list.value.extend(
+ [score])
+
+ example.features.feature[
+ 'image/object/class/label'].int64_list.value.extend(
+ [image_class_labels[0]])
+
+ example.features.feature[
+ 'image/object/class/text'].bytes_list.value.extend(
+ [image_class_texts[0]])
+
+ # Add other essential example attributes
+ example.features.feature['image/encoded'].bytes_list.value.extend(
+ input_example.features.feature['image/encoded'].bytes_list.value)
+ example.features.feature['image/height'].int64_list.value.extend(
+ input_example.features.feature['image/height'].int64_list.value)
+ example.features.feature['image/width'].int64_list.value.extend(
+ input_example.features.feature['image/width'].int64_list.value)
+ example.features.feature['image/source_id'].bytes_list.value.extend(
+ input_example.features.feature['image/source_id'].bytes_list.value)
+ example.features.feature['image/location'].bytes_list.value.extend(
+ input_example.features.feature['image/location'].bytes_list.value)
+
+ example.features.feature['image/date_captured'].bytes_list.value.extend(
+ input_example.features.feature['image/date_captured'].bytes_list.value)
+
+ example.features.feature['image/class/text'].bytes_list.value.extend(
+ input_example.features.feature['image/class/text'].bytes_list.value)
+ example.features.feature['image/class/label'].int64_list.value.extend(
+ input_example.features.feature['image/class/label'].int64_list.value)
+
+ example.features.feature['image/seq_id'].bytes_list.value.extend(
+ input_example.features.feature['image/seq_id'].bytes_list.value)
+ example.features.feature['image/seq_num_frames'].int64_list.value.extend(
+ input_example.features.feature['image/seq_num_frames'].int64_list.value)
+ example.features.feature['image/seq_frame_num'].int64_list.value.extend(
+ input_example.features.feature['image/seq_frame_num'].int64_list.value)
+
+ self._num_examples_processed.inc(1)
+ return [example]
+
+
+def construct_pipeline(input_tfrecord, output_tfrecord, model_dir,
+ confidence_threshold, num_shards):
+ """Returns a Beam pipeline to run object detection inference.
+
+ Args:
+ input_tfrecord: A TFRecord of tf.train.Example protos containing images.
+ output_tfrecord: A TFRecord of tf.train.Example protos that contain images
+ in the input TFRecord and the detections from the model.
+ model_dir: Path to `saved_model` to use for inference.
+ confidence_threshold: Threshold to use when keeping detection results.
+ num_shards: The number of output shards.
+ Returns:
+ pipeline: A Beam pipeline.
+ """
+ def pipeline(root):
+ input_collection = (
+ root | 'ReadInputTFRecord' >> beam.io.tfrecordio.ReadFromTFRecord(
+ input_tfrecord,
+ coder=beam.coders.BytesCoder()))
+ output_collection = input_collection | 'RunInference' >> beam.ParDo(
+ GenerateDetectionDataFn(model_dir, confidence_threshold))
+ output_collection = output_collection | 'Reshuffle' >> beam.Reshuffle()
+ _ = output_collection | 'WritetoDisk' >> beam.io.tfrecordio.WriteToTFRecord(
+ output_tfrecord,
+ num_shards=num_shards,
+ coder=beam.coders.ProtoCoder(tf.train.Example))
+ return pipeline
+
+
+def main(_):
+ """Runs the Beam pipeline that performs inference.
+
+ Args:
+ _: unused
+ """
+ # must create before flags are used
+ runner = runners.DirectRunner()
+
+ dirname = os.path.dirname(FLAGS.detection_output_tfrecord)
+ tf.io.gfile.makedirs(dirname)
+ runner.run(
+ construct_pipeline(FLAGS.detection_input_tfrecord,
+ FLAGS.detection_output_tfrecord,
+ FLAGS.detection_model_dir,
+ FLAGS.confidence_threshold,
+ FLAGS.num_shards))
+
+
+if __name__ == '__main__':
+ flags.mark_flags_as_required([
+ 'detection_input_tfrecord',
+ 'detection_output_tfrecord',
+ 'detection_model_dir'
+ ])
+ app.run(main)
diff --git a/research/object_detection/dataset_tools/context_rcnn/generate_detection_data_tf1_test.py b/research/object_detection/dataset_tools/context_rcnn/generate_detection_data_tf1_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..279183110b2e60d4dedd56af80a7cb45d33a8367
--- /dev/null
+++ b/research/object_detection/dataset_tools/context_rcnn/generate_detection_data_tf1_test.py
@@ -0,0 +1,270 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Tests for generate_detection_data."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import contextlib
+import os
+import tempfile
+import unittest
+import numpy as np
+import six
+import tensorflow.compat.v1 as tf
+
+from object_detection import exporter
+from object_detection.builders import model_builder
+from object_detection.core import model
+from object_detection.dataset_tools.context_rcnn import generate_detection_data
+from object_detection.protos import pipeline_pb2
+from object_detection.utils import tf_version
+from apache_beam import runners
+
+if six.PY2:
+ import mock # pylint: disable=g-import-not-at-top
+else:
+ mock = unittest.mock
+
+
+class FakeModel(model.DetectionModel):
+ """A Fake Detection model with expected output nodes from post-processing."""
+
+ def preprocess(self, inputs):
+ true_image_shapes = [] # Doesn't matter for the fake model.
+ return tf.identity(inputs), true_image_shapes
+
+ def predict(self, preprocessed_inputs, true_image_shapes):
+ return {'image': tf.layers.conv2d(preprocessed_inputs, 3, 1)}
+
+ def postprocess(self, prediction_dict, true_image_shapes):
+ with tf.control_dependencies(prediction_dict.values()):
+ postprocessed_tensors = {
+ 'detection_boxes': tf.constant([[[0.0, 0.1, 0.5, 0.6],
+ [0.5, 0.5, 0.8, 0.8]]], tf.float32),
+ 'detection_scores': tf.constant([[0.95, 0.6]], tf.float32),
+ 'detection_multiclass_scores': tf.constant([[[0.1, 0.7, 0.2],
+ [0.3, 0.1, 0.6]]],
+ tf.float32),
+ 'detection_classes': tf.constant([[0, 1]], tf.float32),
+ 'num_detections': tf.constant([2], tf.float32)
+ }
+ return postprocessed_tensors
+
+ def restore_map(self, checkpoint_path, fine_tune_checkpoint_type):
+ pass
+
+ def restore_from_objects(self, fine_tune_checkpoint_type):
+ pass
+
+ def loss(self, prediction_dict, true_image_shapes):
+ pass
+
+ def regularization_losses(self):
+ pass
+
+ def updates(self):
+ pass
+
+
+@contextlib.contextmanager
+def InMemoryTFRecord(entries):
+ temp = tempfile.NamedTemporaryFile(delete=False)
+ filename = temp.name
+ try:
+ with tf.python_io.TFRecordWriter(filename) as writer:
+ for value in entries:
+ writer.write(value)
+ yield filename
+ finally:
+ os.unlink(filename)
+
+
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
+class GenerateDetectionDataTest(tf.test.TestCase):
+
+ def _save_checkpoint_from_mock_model(self, checkpoint_path):
+ """A function to save checkpoint from a fake Detection Model.
+
+ Args:
+ checkpoint_path: Path to save checkpoint from Fake model.
+ """
+ g = tf.Graph()
+ with g.as_default():
+ mock_model = FakeModel(num_classes=5)
+ preprocessed_inputs, true_image_shapes = mock_model.preprocess(
+ tf.placeholder(tf.float32, shape=[None, None, None, 3]))
+ predictions = mock_model.predict(preprocessed_inputs, true_image_shapes)
+ mock_model.postprocess(predictions, true_image_shapes)
+ tf.train.get_or_create_global_step()
+ saver = tf.train.Saver()
+ init = tf.global_variables_initializer()
+ with self.test_session(graph=g) as sess:
+ sess.run(init)
+ saver.save(sess, checkpoint_path)
+
+ def _export_saved_model(self):
+ tmp_dir = self.get_temp_dir()
+ checkpoint_path = os.path.join(tmp_dir, 'model.ckpt')
+ self._save_checkpoint_from_mock_model(checkpoint_path)
+ output_directory = os.path.join(tmp_dir, 'output')
+ saved_model_path = os.path.join(output_directory, 'saved_model')
+ tf.io.gfile.makedirs(output_directory)
+ with mock.patch.object(
+ model_builder, 'build', autospec=True) as mock_builder:
+ mock_builder.return_value = FakeModel(num_classes=5)
+ pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
+ pipeline_config.eval_config.use_moving_averages = False
+ detection_model = model_builder.build(pipeline_config.model,
+ is_training=False)
+ outputs, placeholder_tensor = exporter.build_detection_graph(
+ input_type='tf_example',
+ detection_model=detection_model,
+ input_shape=None,
+ output_collection_name='inference_op',
+ graph_hook_fn=None)
+ output_node_names = ','.join(outputs.keys())
+ saver = tf.train.Saver()
+ input_saver_def = saver.as_saver_def()
+ frozen_graph_def = exporter.freeze_graph_with_def_protos(
+ input_graph_def=tf.get_default_graph().as_graph_def(),
+ input_saver_def=input_saver_def,
+ input_checkpoint=checkpoint_path,
+ output_node_names=output_node_names,
+ restore_op_name='save/restore_all',
+ filename_tensor_name='save/Const:0',
+ output_graph='',
+ clear_devices=True,
+ initializer_nodes='')
+ exporter.write_saved_model(
+ saved_model_path=saved_model_path,
+ frozen_graph_def=frozen_graph_def,
+ inputs=placeholder_tensor,
+ outputs=outputs)
+ return saved_model_path
+
+ def _create_tf_example(self):
+ with self.test_session():
+ encoded_image = tf.image.encode_jpeg(
+ tf.constant(np.ones((4, 6, 3)).astype(np.uint8))).eval()
+
+ def BytesFeature(value):
+ return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
+
+ def Int64Feature(value):
+ return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
+
+ example = tf.train.Example(features=tf.train.Features(feature={
+ 'image/encoded': BytesFeature(encoded_image),
+ 'image/source_id': BytesFeature(b'image_id'),
+ 'image/height': Int64Feature(4),
+ 'image/width': Int64Feature(6),
+ 'image/object/class/label': Int64Feature(5),
+ 'image/object/class/text': BytesFeature(b'hyena'),
+ 'image/class/label': Int64Feature(5),
+ 'image/class/text': BytesFeature(b'hyena'),
+ }))
+
+ return example.SerializeToString()
+
+ def assert_expected_example(self, example):
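+    # Only the single detection above the confidence threshold should remain,
+    # along with the image-level class and other copied attributes.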
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/ymin'].float_list.value,
+ [0.0])
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/xmin'].float_list.value,
+ [0.1])
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/ymax'].float_list.value,
+ [0.5])
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/xmax'].float_list.value,
+ [0.6])
+ self.assertAllClose(
+ example.features.feature['image/object/class/score']
+ .float_list.value, [0.95])
+ self.assertAllClose(
+ example.features.feature['image/object/class/label']
+ .int64_list.value, [5])
+ self.assertAllEqual(
+ example.features.feature['image/object/class/text']
+ .bytes_list.value, [b'hyena'])
+ self.assertAllClose(
+ example.features.feature['image/class/label']
+ .int64_list.value, [5])
+ self.assertAllEqual(
+ example.features.feature['image/class/text']
+ .bytes_list.value, [b'hyena'])
+
+ # Check other essential attributes.
+ self.assertAllEqual(
+ example.features.feature['image/height'].int64_list.value, [4])
+ self.assertAllEqual(
+ example.features.feature['image/width'].int64_list.value, [6])
+ self.assertAllEqual(
+ example.features.feature['image/source_id'].bytes_list.value,
+ [b'image_id'])
+ self.assertTrue(
+ example.features.feature['image/encoded'].bytes_list.value)
+
+ def test_generate_detection_data_fn(self):
+ saved_model_path = self._export_saved_model()
+ confidence_threshold = 0.8
+ inference_fn = generate_detection_data.GenerateDetectionDataFn(
+ saved_model_path, confidence_threshold)
+ inference_fn.start_bundle()
+ generated_example = self._create_tf_example()
+ self.assertAllEqual(tf.train.Example.FromString(
+ generated_example).features.feature['image/object/class/label']
+ .int64_list.value, [5])
+ self.assertAllEqual(tf.train.Example.FromString(
+ generated_example).features.feature['image/object/class/text']
+ .bytes_list.value, [b'hyena'])
+ output = inference_fn.process(generated_example)
+ output_example = output[0]
+
+ self.assertAllEqual(
+ output_example.features.feature['image/object/class/label']
+ .int64_list.value, [5])
+ self.assertAllEqual(output_example.features.feature['image/width']
+ .int64_list.value, [6])
+
+ self.assert_expected_example(output_example)
+
+ def test_beam_pipeline(self):
+ with InMemoryTFRecord([self._create_tf_example()]) as input_tfrecord:
+ runner = runners.DirectRunner()
+ temp_dir = tempfile.mkdtemp(dir=os.environ.get('TEST_TMPDIR'))
+ output_tfrecord = os.path.join(temp_dir, 'output_tfrecord')
+ saved_model_path = self._export_saved_model()
+ confidence_threshold = 0.8
+ num_shards = 1
+ pipeline = generate_detection_data.construct_pipeline(
+ input_tfrecord, output_tfrecord, saved_model_path,
+ confidence_threshold, num_shards)
+ runner.run(pipeline)
+ filenames = tf.io.gfile.glob(output_tfrecord + '-?????-of-?????')
+ actual_output = []
+ record_iterator = tf.python_io.tf_record_iterator(path=filenames[0])
+ for record in record_iterator:
+ actual_output.append(record)
+ self.assertEqual(len(actual_output), 1)
+ self.assert_expected_example(tf.train.Example.FromString(
+ actual_output[0]))
+
+
+if __name__ == '__main__':
+ tf.test.main()
diff --git a/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data.py b/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..a147c4e88339f44ff417dc38b60cff28ffe010ed
--- /dev/null
+++ b/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data.py
@@ -0,0 +1,378 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+r"""A Beam job to generate embedding data for camera trap images.
+
+This tool runs inference with an exported Object Detection model in
+`saved_model` format and produces raw embeddings for camera trap data. These
+embeddings contain an object-centric feature embedding from Faster R-CNN, the
+datetime that the image was taken (normalized to values in [0, 1]), and the
+position of the object of interest. By default, only the highest-scoring object
+embedding is included.
+
+Steps to generate an embedding dataset:
+1. Use object_detection/export_inference_graph.py to get a Faster R-CNN
+ `saved_model` for inference. The input node must accept a tf.Example proto.
+2. Run this tool with `saved_model` from step 1 and a TFRecord of tf.Example
+ protos containing images for inference.
+
+Example Usage:
+--------------
+python tensorflow_models/object_detection/export_inference_graph.py \
+ --alsologtostderr \
+ --input_type tf_example \
+ --pipeline_config_path path/to/faster_rcnn_model.config \
+ --trained_checkpoint_prefix path/to/model.ckpt \
+ --output_directory path/to/exported_model_directory
+
+python generate_embedding_data.py \
+ --alsologtostderr \
+ --embedding_input_tfrecord path/to/input_tfrecords* \
+ --embedding_output_tfrecord path/to/output_tfrecords \
+ --embedding_model_dir path/to/exported_model_directory/saved_model
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import datetime
+import os
+import threading
+from absl import app
+from absl import flags
+import apache_beam as beam
+import numpy as np
+import six
+import tensorflow.compat.v1 as tf
+from apache_beam import runners
+
+flags.DEFINE_string('embedding_input_tfrecord', None, 'TFRecord containing '
+                    'images in tf.Example format for object detection.')
+flags.DEFINE_string('embedding_output_tfrecord', None,
+ 'TFRecord containing embeddings in tf.Example format.')
+flags.DEFINE_string('embedding_model_dir', None, 'Path to directory containing'
+                    ' an object detection SavedModel with'
+                    ' detection_box_classifier_features in the output.')
+flags.DEFINE_integer('top_k_embedding_count', 1,
+                     'The number of top k embeddings to add to the memory '
+                     'bank.')
+flags.DEFINE_integer('bottom_k_embedding_count', 0,
+ 'The number of bottom k embeddings to add to the memory '
+ 'bank.')
+flags.DEFINE_integer('num_shards', 0, 'Number of output shards.')
+
+
+FLAGS = flags.FLAGS
+
+
+class GenerateEmbeddingDataFn(beam.DoFn):
+ """Generates embedding data for camera trap images.
+
+ This Beam DoFn performs inference with an object detection `saved_model` and
+ produces contextual embedding vectors.
+ """
+ session_lock = threading.Lock()
+
+ def __init__(self, model_dir, top_k_embedding_count,
+ bottom_k_embedding_count):
+ """Initialization function.
+
+ Args:
+      model_dir: A directory containing the saved model.
+      top_k_embedding_count: The number of high-confidence embeddings to store.
+      bottom_k_embedding_count: The number of low-confidence embeddings to
+        store.
+ """
+ self._model_dir = model_dir
+ self._session = None
+ self._num_examples_processed = beam.metrics.Metrics.counter(
+ 'embedding_data_generation', 'num_tf_examples_processed')
+ self._top_k_embedding_count = top_k_embedding_count
+ self._bottom_k_embedding_count = bottom_k_embedding_count
+
+ def start_bundle(self):
+ self._load_inference_model()
+
+ def _load_inference_model(self):
+ # Because initialization of the tf.Session is expensive we share
+ # one instance across all threads in the worker. This is possible since
+ # tf.Session.run() is thread safe.
+ with self.session_lock:
+ if self._session is None:
+ graph = tf.Graph()
+ self._session = tf.Session(graph=graph)
+ with graph.as_default():
+ meta_graph = tf.saved_model.loader.load(
+ self._session, [tf.saved_model.tag_constants.SERVING],
+ self._model_dir)
+ signature = meta_graph.signature_def['serving_default']
+ input_tensor_name = signature.inputs['inputs'].name
+ detection_features_name = signature.outputs['detection_features'].name
+ detection_boxes_name = signature.outputs['detection_boxes'].name
+ num_detections_name = signature.outputs['num_detections'].name
+ self._input = graph.get_tensor_by_name(input_tensor_name)
+ self._embedding_node = graph.get_tensor_by_name(detection_features_name)
+ self._box_node = graph.get_tensor_by_name(detection_boxes_name)
+ self._scores_node = graph.get_tensor_by_name(
+ signature.outputs['detection_scores'].name)
+ self._num_detections = graph.get_tensor_by_name(num_detections_name)
+ tf.logging.info(signature.outputs['detection_features'].name)
+ tf.logging.info(signature.outputs['detection_boxes'].name)
+ tf.logging.info(signature.outputs['num_detections'].name)
+
+ def process(self, tfrecord_entry):
+ return self._run_inference_and_generate_embedding(tfrecord_entry)
+
+ def _run_inference_and_generate_embedding(self, tfrecord_entry):
+ input_example = tf.train.Example.FromString(tfrecord_entry)
+    # Convert the date_captured datetime string to a unix timestamp and store
+    # it in the output example.
+
+ def get_date_captured(example):
+ date_captured = datetime.datetime.strptime(
+ six.ensure_str(
+ example.features.feature[
+ 'image/date_captured'].bytes_list.value[0]),
+ '%Y-%m-%d %H:%M:%S')
+ return date_captured
+
+ try:
+ date_captured = get_date_captured(input_example)
+ except Exception: # pylint: disable=broad-except
+ # we require date_captured to be available for all images
+ return []
+
+ def embed_date_captured(date_captured):
+      """Encodes the datetime of the image as values normalized to [0, 1]."""
+ embedded_date_captured = []
+ month_max = 12.0
+ day_max = 31.0
+ hour_max = 24.0
+ minute_max = 60.0
+ min_year = 1990.0
+ max_year = 2030.0
+
+ year = (date_captured.year-min_year)/float(max_year-min_year)
+ embedded_date_captured.append(year)
+
+ month = (date_captured.month-1)/month_max
+ embedded_date_captured.append(month)
+
+ day = (date_captured.day-1)/day_max
+ embedded_date_captured.append(day)
+
+ hour = date_captured.hour/hour_max
+ embedded_date_captured.append(hour)
+
+ minute = date_captured.minute/minute_max
+ embedded_date_captured.append(minute)
+
+ return np.asarray(embedded_date_captured)
+
+ def embed_position_and_size(box):
+      """Encodes the box as normalized [center_x, center_y, width, height]."""
+ ymin = box[0]
+ xmin = box[1]
+ ymax = box[2]
+ xmax = box[3]
+ w = xmax - xmin
+ h = ymax - ymin
+ x = xmin + w / 2.0
+ y = ymin + h / 2.0
+ return np.asarray([x, y, w, h])
+
+ unix_time = (
+ (date_captured - datetime.datetime.fromtimestamp(0)).total_seconds())
+
+ example = tf.train.Example()
+ example.features.feature['image/unix_time'].float_list.value.extend(
+ [unix_time])
+
+ (detection_features, detection_boxes, num_detections,
+ detection_scores) = self._session.run(
+ [
+ self._embedding_node, self._box_node, self._num_detections[0],
+ self._scores_node
+ ],
+ feed_dict={self._input: [tfrecord_entry]})
+
+ num_detections = int(num_detections)
+ embed_all = []
+ score_all = []
+
+ detection_features = np.asarray(detection_features)
+
+ def get_bb_embedding(detection_features, detection_boxes, detection_scores,
+ index):
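+      # Average-pool the spatial dimensions of the per-box feature map into a
+      # single feature vector and append the normalized box position encoding.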
+ embedding = detection_features[0][index]
+ pooled_embedding = np.mean(np.mean(embedding, axis=1), axis=0)
+
+ box = detection_boxes[0][index]
+ position_embedding = embed_position_and_size(box)
+
+ score = detection_scores[0][index]
+ return np.concatenate((pooled_embedding, position_embedding)), score
+
+ temporal_embedding = embed_date_captured(date_captured)
+
+ embedding_count = 0
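+    # Add embeddings for the top k highest-scoring detections (detections are
+    # assumed to be returned in descending score order).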
+ for index in range(min(num_detections, self._top_k_embedding_count)):
+ bb_embedding, score = get_bb_embedding(
+ detection_features, detection_boxes, detection_scores, index)
+ embed_all.extend(bb_embedding)
+ embed_all.extend(temporal_embedding)
+ score_all.append(score)
+ embedding_count += 1
+
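+    # Add embeddings for the bottom k lowest-scoring detections, walking
+    # backwards from the last detection.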
+ for index in range(
+ max(0, num_detections - 1),
+ max(-1, num_detections - 1 - self._bottom_k_embedding_count), -1):
+ bb_embedding, score = get_bb_embedding(
+ detection_features, detection_boxes, detection_scores, index)
+ embed_all.extend(bb_embedding)
+ embed_all.extend(temporal_embedding)
+ score_all.append(score)
+ embedding_count += 1
+
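+    # If nothing was selected above, fall back to the single highest-scoring
+    # detection so that every example carries at least one embedding.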
+ if embedding_count == 0:
+ bb_embedding, score = get_bb_embedding(
+ detection_features, detection_boxes, detection_scores, 0)
+ embed_all.extend(bb_embedding)
+ embed_all.extend(temporal_embedding)
+ score_all.append(score)
+
+ # Takes max in case embedding_count is 0.
+ embedding_length = len(embed_all) // max(1, embedding_count)
+
+ embed_all = np.asarray(embed_all)
+
+ example.features.feature['image/embedding'].float_list.value.extend(
+ embed_all)
+ example.features.feature['image/embedding_score'].float_list.value.extend(
+ score_all)
+ example.features.feature['image/embedding_length'].int64_list.value.append(
+ embedding_length)
+ example.features.feature['image/embedding_count'].int64_list.value.append(
+ embedding_count)
+
+ # Add other essential example attributes
+ example.features.feature['image/encoded'].bytes_list.value.extend(
+ input_example.features.feature['image/encoded'].bytes_list.value)
+ example.features.feature['image/height'].int64_list.value.extend(
+ input_example.features.feature['image/height'].int64_list.value)
+ example.features.feature['image/width'].int64_list.value.extend(
+ input_example.features.feature['image/width'].int64_list.value)
+ example.features.feature['image/source_id'].bytes_list.value.extend(
+ input_example.features.feature['image/source_id'].bytes_list.value)
+ example.features.feature['image/location'].bytes_list.value.extend(
+ input_example.features.feature['image/location'].bytes_list.value)
+
+ example.features.feature['image/date_captured'].bytes_list.value.extend(
+ input_example.features.feature['image/date_captured'].bytes_list.value)
+
+ example.features.feature['image/class/text'].bytes_list.value.extend(
+ input_example.features.feature['image/class/text'].bytes_list.value)
+ example.features.feature['image/class/label'].int64_list.value.extend(
+ input_example.features.feature['image/class/label'].int64_list.value)
+
+ example.features.feature['image/seq_id'].bytes_list.value.extend(
+ input_example.features.feature['image/seq_id'].bytes_list.value)
+ example.features.feature['image/seq_num_frames'].int64_list.value.extend(
+ input_example.features.feature['image/seq_num_frames'].int64_list.value)
+ example.features.feature['image/seq_frame_num'].int64_list.value.extend(
+ input_example.features.feature['image/seq_frame_num'].int64_list.value)
+
+ example.features.feature['image/object/bbox/ymax'].float_list.value.extend(
+ input_example.features.feature[
+ 'image/object/bbox/ymax'].float_list.value)
+ example.features.feature['image/object/bbox/ymin'].float_list.value.extend(
+ input_example.features.feature[
+ 'image/object/bbox/ymin'].float_list.value)
+ example.features.feature['image/object/bbox/xmax'].float_list.value.extend(
+ input_example.features.feature[
+ 'image/object/bbox/xmax'].float_list.value)
+ example.features.feature['image/object/bbox/xmin'].float_list.value.extend(
+ input_example.features.feature[
+ 'image/object/bbox/xmin'].float_list.value)
+ example.features.feature[
+ 'image/object/class/score'].float_list.value.extend(
+ input_example.features.feature[
+ 'image/object/class/score'].float_list.value)
+ example.features.feature[
+ 'image/object/class/label'].int64_list.value.extend(
+ input_example.features.feature[
+ 'image/object/class/label'].int64_list.value)
+ example.features.feature[
+ 'image/object/class/text'].bytes_list.value.extend(
+ input_example.features.feature[
+ 'image/object/class/text'].bytes_list.value)
+
+ self._num_examples_processed.inc(1)
+ return [example]
+
+
+def construct_pipeline(input_tfrecord, output_tfrecord, model_dir,
+ top_k_embedding_count, bottom_k_embedding_count,
+ num_shards):
+ """Returns a beam pipeline to run object detection inference.
+
+ Args:
+    input_tfrecord: A TFRecord of tf.train.Example protos containing images.
+    output_tfrecord: A TFRecord of tf.train.Example protos that contain the
+      images from the input TFRecord along with the generated embeddings.
+ model_dir: Path to `saved_model` to use for inference.
+ top_k_embedding_count: The number of high-confidence embeddings to store.
+ bottom_k_embedding_count: The number of low-confidence embeddings to store.
+ num_shards: The number of output shards.
+ """
+ def pipeline(root):
+ input_collection = (
+ root | 'ReadInputTFRecord' >> beam.io.tfrecordio.ReadFromTFRecord(
+ input_tfrecord,
+ coder=beam.coders.BytesCoder()))
+ output_collection = input_collection | 'ExtractEmbedding' >> beam.ParDo(
+ GenerateEmbeddingDataFn(model_dir, top_k_embedding_count,
+ bottom_k_embedding_count))
+ output_collection = output_collection | 'Reshuffle' >> beam.Reshuffle()
+ _ = output_collection | 'WritetoDisk' >> beam.io.tfrecordio.WriteToTFRecord(
+ output_tfrecord,
+ num_shards=num_shards,
+ coder=beam.coders.ProtoCoder(tf.train.Example))
+ return pipeline
+
+
+def main(_):
+ """Runs the Beam pipeline that performs inference.
+
+ Args:
+ _: unused
+ """
+ # must create before flags are used
+ runner = runners.DirectRunner()
+
+ dirname = os.path.dirname(FLAGS.embedding_output_tfrecord)
+ tf.io.gfile.makedirs(dirname)
+ runner.run(
+ construct_pipeline(FLAGS.embedding_input_tfrecord,
+ FLAGS.embedding_output_tfrecord,
+ FLAGS.embedding_model_dir, FLAGS.top_k_embedding_count,
+ FLAGS.bottom_k_embedding_count, FLAGS.num_shards))
+
+
+if __name__ == '__main__':
+ flags.mark_flags_as_required([
+ 'embedding_input_tfrecord',
+ 'embedding_output_tfrecord',
+ 'embedding_model_dir'
+ ])
+ app.run(main)
diff --git a/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf1_test.py b/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf1_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..836bd59fb02e7b71e906cbdc0a56fd0e67fc02d4
--- /dev/null
+++ b/research/object_detection/dataset_tools/context_rcnn/generate_embedding_data_tf1_test.py
@@ -0,0 +1,340 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Tests for generate_embedding_data."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+import contextlib
+import os
+import tempfile
+import unittest
+import numpy as np
+import six
+import tensorflow.compat.v1 as tf
+from object_detection import exporter
+from object_detection.builders import model_builder
+from object_detection.core import model
+from object_detection.dataset_tools.context_rcnn import generate_embedding_data
+from object_detection.protos import pipeline_pb2
+from object_detection.utils import tf_version
+from apache_beam import runners
+
+if six.PY2:
+ import mock # pylint: disable=g-import-not-at-top
+else:
+ mock = unittest.mock
+
+
+class FakeModel(model.DetectionModel):
+ """A Fake Detection model with expected output nodes from post-processing."""
+
+ def preprocess(self, inputs):
+ true_image_shapes = [] # Doesn't matter for the fake model.
+ return tf.identity(inputs), true_image_shapes
+
+ def predict(self, preprocessed_inputs, true_image_shapes):
+ return {'image': tf.layers.conv2d(preprocessed_inputs, 3, 1)}
+
+ def postprocess(self, prediction_dict, true_image_shapes):
+ with tf.control_dependencies(prediction_dict.values()):
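+      # Fake per-box feature maps: 2 detections, each a
+      # feature_dims x feature_dims x num_features block, mirroring the
+      # detection_features output that generate_embedding_data pools over.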
+ num_features = 100
+ feature_dims = 10
+ classifier_feature = np.ones(
+ (2, feature_dims, feature_dims, num_features),
+ dtype=np.float32).tolist()
+ postprocessed_tensors = {
+ 'detection_boxes': tf.constant([[[0.0, 0.1, 0.5, 0.6],
+ [0.5, 0.5, 0.8, 0.8]]], tf.float32),
+ 'detection_scores': tf.constant([[0.95, 0.6]], tf.float32),
+ 'detection_multiclass_scores': tf.constant([[[0.1, 0.7, 0.2],
+ [0.3, 0.1, 0.6]]],
+ tf.float32),
+ 'detection_classes': tf.constant([[0, 1]], tf.float32),
+ 'num_detections': tf.constant([2], tf.float32),
+ 'detection_features':
+ tf.constant([classifier_feature],
+ tf.float32)
+ }
+ return postprocessed_tensors
+
+ def restore_map(self, checkpoint_path, fine_tune_checkpoint_type):
+ pass
+
+ def restore_from_objects(self, fine_tune_checkpoint_type):
+ pass
+
+ def loss(self, prediction_dict, true_image_shapes):
+ pass
+
+ def regularization_losses(self):
+ pass
+
+ def updates(self):
+ pass
+
+
+@contextlib.contextmanager
+def InMemoryTFRecord(entries):
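+  """Writes `entries` to a temporary TFRecord file and yields its filename."""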
+ temp = tempfile.NamedTemporaryFile(delete=False)
+ filename = temp.name
+ try:
+ with tf.python_io.TFRecordWriter(filename) as writer:
+ for value in entries:
+ writer.write(value)
+ yield filename
+ finally:
+ os.unlink(temp.name)
+
+
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
+class GenerateEmbeddingData(tf.test.TestCase):
+
+ def _save_checkpoint_from_mock_model(self, checkpoint_path):
+ """A function to save checkpoint from a fake Detection Model.
+
+ Args:
+ checkpoint_path: Path to save checkpoint from Fake model.
+ """
+ g = tf.Graph()
+ with g.as_default():
+ mock_model = FakeModel(num_classes=5)
+ preprocessed_inputs, true_image_shapes = mock_model.preprocess(
+ tf.placeholder(tf.float32, shape=[None, None, None, 3]))
+ predictions = mock_model.predict(preprocessed_inputs, true_image_shapes)
+ mock_model.postprocess(predictions, true_image_shapes)
+ tf.train.get_or_create_global_step()
+ saver = tf.train.Saver()
+ init = tf.global_variables_initializer()
+ with self.test_session(graph=g) as sess:
+ sess.run(init)
+ saver.save(sess, checkpoint_path)
+
+ def _export_saved_model(self):
+ tmp_dir = self.get_temp_dir()
+ checkpoint_path = os.path.join(tmp_dir, 'model.ckpt')
+ self._save_checkpoint_from_mock_model(checkpoint_path)
+ output_directory = os.path.join(tmp_dir, 'output')
+ saved_model_path = os.path.join(output_directory, 'saved_model')
+ tf.io.gfile.makedirs(output_directory)
+ with mock.patch.object(
+ model_builder, 'build', autospec=True) as mock_builder:
+ mock_builder.return_value = FakeModel(num_classes=5)
+ pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
+ pipeline_config.eval_config.use_moving_averages = False
+ detection_model = model_builder.build(pipeline_config.model,
+ is_training=False)
+ outputs, placeholder_tensor = exporter.build_detection_graph(
+ input_type='tf_example',
+ detection_model=detection_model,
+ input_shape=None,
+ output_collection_name='inference_op',
+ graph_hook_fn=None)
+ output_node_names = ','.join(outputs.keys())
+ saver = tf.train.Saver()
+ input_saver_def = saver.as_saver_def()
+ frozen_graph_def = exporter.freeze_graph_with_def_protos(
+ input_graph_def=tf.get_default_graph().as_graph_def(),
+ input_saver_def=input_saver_def,
+ input_checkpoint=checkpoint_path,
+ output_node_names=output_node_names,
+ restore_op_name='save/restore_all',
+ filename_tensor_name='save/Const:0',
+ output_graph='',
+ clear_devices=True,
+ initializer_nodes='')
+ exporter.write_saved_model(
+ saved_model_path=saved_model_path,
+ frozen_graph_def=frozen_graph_def,
+ inputs=placeholder_tensor,
+ outputs=outputs)
+ return saved_model_path
+
+ def _create_tf_example(self):
+ with self.test_session():
+ encoded_image = tf.image.encode_jpeg(
+ tf.constant(np.ones((4, 4, 3)).astype(np.uint8))).eval()
+
+ def BytesFeature(value):
+ return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
+
+ def Int64Feature(value):
+ return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
+
+ def FloatFeature(value):
+ return tf.train.Feature(float_list=tf.train.FloatList(value=[value]))
+
+ example = tf.train.Example(
+ features=tf.train.Features(
+ feature={
+ 'image/encoded': BytesFeature(encoded_image),
+ 'image/source_id': BytesFeature(b'image_id'),
+ 'image/height': Int64Feature(400),
+ 'image/width': Int64Feature(600),
+ 'image/class/label': Int64Feature(5),
+ 'image/class/text': BytesFeature(b'hyena'),
+ 'image/object/bbox/xmin': FloatFeature(0.1),
+ 'image/object/bbox/xmax': FloatFeature(0.6),
+ 'image/object/bbox/ymin': FloatFeature(0.0),
+ 'image/object/bbox/ymax': FloatFeature(0.5),
+ 'image/object/class/score': FloatFeature(0.95),
+ 'image/object/class/label': Int64Feature(5),
+ 'image/object/class/text': BytesFeature(b'hyena'),
+ 'image/date_captured': BytesFeature(b'2019-10-20 12:12:12')
+ }))
+
+ return example.SerializeToString()
+
+ def assert_expected_example(self, example, topk=False, botk=False):
+ # Check embeddings
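+    # Each embedding holds 109 floats: 100 pooled feature channels from the
+    # fake model, 4 position values and 5 normalized datetime values; two
+    # boxes therefore yield 218 values.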
+ if topk or botk:
+ self.assertEqual(len(
+ example.features.feature['image/embedding'].float_list.value),
+ 218)
+ self.assertAllEqual(
+ example.features.feature['image/embedding_count'].int64_list.value,
+ [2])
+ else:
+ self.assertEqual(len(
+ example.features.feature['image/embedding'].float_list.value),
+ 109)
+ self.assertAllEqual(
+ example.features.feature['image/embedding_count'].int64_list.value,
+ [1])
+
+ self.assertAllEqual(
+ example.features.feature['image/embedding_length'].int64_list.value,
+ [109])
+
+ # Check annotations
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/ymin'].float_list.value,
+ [0.0])
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/xmin'].float_list.value,
+ [0.1])
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/ymax'].float_list.value,
+ [0.5])
+ self.assertAllClose(
+ example.features.feature['image/object/bbox/xmax'].float_list.value,
+ [0.6])
+ self.assertAllClose(
+ example.features.feature['image/object/class/score']
+ .float_list.value, [0.95])
+ self.assertAllClose(
+ example.features.feature['image/object/class/label']
+ .int64_list.value, [5])
+ self.assertAllEqual(
+ example.features.feature['image/object/class/text']
+ .bytes_list.value, [b'hyena'])
+ self.assertAllClose(
+ example.features.feature['image/class/label']
+ .int64_list.value, [5])
+ self.assertAllEqual(
+ example.features.feature['image/class/text']
+ .bytes_list.value, [b'hyena'])
+
+ # Check other essential attributes.
+ self.assertAllEqual(
+ example.features.feature['image/height'].int64_list.value, [400])
+ self.assertAllEqual(
+ example.features.feature['image/width'].int64_list.value, [600])
+ self.assertAllEqual(
+ example.features.feature['image/source_id'].bytes_list.value,
+ [b'image_id'])
+ self.assertTrue(
+ example.features.feature['image/encoded'].bytes_list.value)
+
+ def test_generate_embedding_data_fn(self):
+ saved_model_path = self._export_saved_model()
+ top_k_embedding_count = 1
+ bottom_k_embedding_count = 0
+ inference_fn = generate_embedding_data.GenerateEmbeddingDataFn(
+ saved_model_path, top_k_embedding_count, bottom_k_embedding_count)
+ inference_fn.start_bundle()
+ generated_example = self._create_tf_example()
+ self.assertAllEqual(tf.train.Example.FromString(
+ generated_example).features.feature['image/object/class/label']
+ .int64_list.value, [5])
+ self.assertAllEqual(tf.train.Example.FromString(
+ generated_example).features.feature['image/object/class/text']
+ .bytes_list.value, [b'hyena'])
+ output = inference_fn.process(generated_example)
+ output_example = output[0]
+ self.assert_expected_example(output_example)
+
+ def test_generate_embedding_data_with_top_k_boxes(self):
+ saved_model_path = self._export_saved_model()
+ top_k_embedding_count = 2
+ bottom_k_embedding_count = 0
+ inference_fn = generate_embedding_data.GenerateEmbeddingDataFn(
+ saved_model_path, top_k_embedding_count, bottom_k_embedding_count)
+ inference_fn.start_bundle()
+ generated_example = self._create_tf_example()
+ self.assertAllEqual(
+ tf.train.Example.FromString(generated_example).features
+ .feature['image/object/class/label'].int64_list.value, [5])
+ self.assertAllEqual(
+ tf.train.Example.FromString(generated_example).features
+ .feature['image/object/class/text'].bytes_list.value, [b'hyena'])
+ output = inference_fn.process(generated_example)
+ output_example = output[0]
+ self.assert_expected_example(output_example, topk=True)
+
+ def test_generate_embedding_data_with_bottom_k_boxes(self):
+ saved_model_path = self._export_saved_model()
+ top_k_embedding_count = 0
+ bottom_k_embedding_count = 2
+ inference_fn = generate_embedding_data.GenerateEmbeddingDataFn(
+ saved_model_path, top_k_embedding_count, bottom_k_embedding_count)
+ inference_fn.start_bundle()
+ generated_example = self._create_tf_example()
+ self.assertAllEqual(
+ tf.train.Example.FromString(generated_example).features
+ .feature['image/object/class/label'].int64_list.value, [5])
+ self.assertAllEqual(
+ tf.train.Example.FromString(generated_example).features
+ .feature['image/object/class/text'].bytes_list.value, [b'hyena'])
+ output = inference_fn.process(generated_example)
+ output_example = output[0]
+ self.assert_expected_example(output_example, botk=True)
+
+ def test_beam_pipeline(self):
+ with InMemoryTFRecord([self._create_tf_example()]) as input_tfrecord:
+ runner = runners.DirectRunner()
+ temp_dir = tempfile.mkdtemp(dir=os.environ.get('TEST_TMPDIR'))
+ output_tfrecord = os.path.join(temp_dir, 'output_tfrecord')
+ saved_model_path = self._export_saved_model()
+ top_k_embedding_count = 1
+ bottom_k_embedding_count = 0
+ num_shards = 1
+ pipeline = generate_embedding_data.construct_pipeline(
+ input_tfrecord, output_tfrecord, saved_model_path,
+ top_k_embedding_count, bottom_k_embedding_count, num_shards)
+ runner.run(pipeline)
+ filenames = tf.io.gfile.glob(
+ output_tfrecord + '-?????-of-?????')
+ actual_output = []
+ record_iterator = tf.python_io.tf_record_iterator(path=filenames[0])
+ for record in record_iterator:
+ actual_output.append(record)
+ self.assertEqual(len(actual_output), 1)
+ self.assert_expected_example(tf.train.Example.FromString(
+ actual_output[0]))
+
+
+if __name__ == '__main__':
+ tf.test.main()
diff --git a/research/object_detection/dataset_tools/create_coco_tf_record.py b/research/object_detection/dataset_tools/create_coco_tf_record.py
index 51ed389105f827335de68ec9c85e04c0083242a5..2703c427e9bae8ebca5233f1ddaf7c42e5f1b82e 100644
--- a/research/object_detection/dataset_tools/create_coco_tf_record.py
+++ b/research/object_detection/dataset_tools/create_coco_tf_record.py
@@ -14,6 +14,9 @@
# ==============================================================================
r"""Convert raw COCO dataset to TFRecord for object_detection.
+This tool supports data generation for object detection (boxes, masks),
+keypoint detection, and DensePose.
+
Please note that this tool creates sharded output files.
Example usage:
@@ -63,7 +66,18 @@ tf.flags.DEFINE_string('train_keypoint_annotations_file', '',
'Training annotations JSON file.')
tf.flags.DEFINE_string('val_keypoint_annotations_file', '',
'Validation annotations JSON file.')
+# DensePose annotations are only available for COCO 2014.
+tf.flags.DEFINE_string('train_densepose_annotations_file', '',
+ 'Training annotations JSON file for DensePose.')
+tf.flags.DEFINE_string('val_densepose_annotations_file', '',
+ 'Validation annotations JSON file for DensePose.')
tf.flags.DEFINE_string('output_dir', '/tmp/', 'Output data directory.')
+# Whether to only produce images/annotations for the person class (for the
+# keypoint / DensePose tasks).
+tf.flags.DEFINE_boolean('remove_non_person_annotations', False, 'Whether to '
+ 'remove all annotations for non-person objects.')
+tf.flags.DEFINE_boolean('remove_non_person_images', False, 'Whether to '
+ 'remove all examples that do not contain a person.')
FLAGS = flags.FLAGS
@@ -77,13 +91,33 @@ _COCO_KEYPOINT_NAMES = [
b'left_knee', b'right_knee', b'left_ankle', b'right_ankle'
]
+_COCO_PART_NAMES = [
+ b'torso_back', b'torso_front', b'right_hand', b'left_hand', b'left_foot',
+ b'right_foot', b'right_upper_leg_back', b'left_upper_leg_back',
+ b'right_upper_leg_front', b'left_upper_leg_front', b'right_lower_leg_back',
+ b'left_lower_leg_back', b'right_lower_leg_front', b'left_lower_leg_front',
+ b'left_upper_arm_back', b'right_upper_arm_back', b'left_upper_arm_front',
+ b'right_upper_arm_front', b'left_lower_arm_back', b'right_lower_arm_back',
+ b'left_lower_arm_front', b'right_lower_arm_front', b'right_face',
+ b'left_face',
+]
+
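+# DensePose part indices (dp_I) are 1-based; subtracting this offset yields
+# 0-based indices into _COCO_PART_NAMES.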
+_DP_PART_ID_OFFSET = 1
+
+
+def clip_to_unit(x):
+ return min(max(x, 0.0), 1.0)
+
def create_tf_example(image,
annotations_list,
image_dir,
category_index,
include_masks=False,
- keypoint_annotations_dict=None):
+ keypoint_annotations_dict=None,
+ densepose_annotations_dict=None,
+ remove_non_person_annotations=False,
+ remove_non_person_images=False):
"""Converts image and annotations to a tf.Example proto.
Args:
@@ -108,10 +142,23 @@ def create_tf_example(image,
dictionary with keys: [u'keypoints', u'num_keypoints'] represeting the
keypoint information for this person object annotation. If None, then
no keypoint annotations will be populated.
+ densepose_annotations_dict: A dictionary that maps from annotation_id to a
+ dictionary with keys: [u'dp_I', u'dp_x', u'dp_y', 'dp_U', 'dp_V']
+ representing part surface coordinates. For more information see
+ http://densepose.org/.
+ remove_non_person_annotations: Whether to remove any annotations that are
+ not the "person" class.
+ remove_non_person_images: Whether to remove any images that do not contain
+ at least one "person" annotation.
Returns:
+ key: SHA256 hash of the image.
example: The converted tf.Example
num_annotations_skipped: Number of (invalid) annotations that were ignored.
+ num_keypoint_annotation_skipped: Number of keypoint annotations that were
+ skipped.
+ num_densepose_annotation_skipped: Number of DensePose annotations that were
+ skipped.
Raises:
ValueError: if the image pointed to by data['filename'] is not a valid JPEG
@@ -146,6 +193,16 @@ def create_tf_example(image,
num_annotations_skipped = 0
num_keypoint_annotation_used = 0
num_keypoint_annotation_skipped = 0
+ dp_part_index = []
+ dp_x = []
+ dp_y = []
+ dp_u = []
+ dp_v = []
+ dp_num_points = []
+ densepose_keys = ['dp_I', 'dp_U', 'dp_V', 'dp_x', 'dp_y', 'bbox']
+ include_densepose = densepose_annotations_dict is not None
+ num_densepose_annotation_used = 0
+ num_densepose_annotation_skipped = 0
for object_annotations in annotations_list:
(x, y, width, height) = tuple(object_annotations['bbox'])
if width <= 0 or height <= 0:
@@ -154,14 +211,18 @@ def create_tf_example(image,
if x + width > image_width or y + height > image_height:
num_annotations_skipped += 1
continue
+ category_id = int(object_annotations['category_id'])
+ category_name = category_index[category_id]['name'].encode('utf8')
+ if remove_non_person_annotations and category_name != b'person':
+ num_annotations_skipped += 1
+ continue
xmin.append(float(x) / image_width)
xmax.append(float(x + width) / image_width)
ymin.append(float(y) / image_height)
ymax.append(float(y + height) / image_height)
is_crowd.append(object_annotations['iscrowd'])
- category_id = int(object_annotations['category_id'])
category_ids.append(category_id)
- category_names.append(category_index[category_id]['name'].encode('utf8'))
+ category_names.append(category_name)
area.append(object_annotations['area'])
if include_masks:
@@ -197,6 +258,40 @@ def create_tf_example(image,
keypoints_visibility.extend([0] * len(_COCO_KEYPOINT_NAMES))
keypoints_name.extend(_COCO_KEYPOINT_NAMES)
num_keypoints.append(0)
+
+ if include_densepose:
+ annotation_id = object_annotations['id']
+ if (annotation_id in densepose_annotations_dict and
+ all(key in densepose_annotations_dict[annotation_id]
+ for key in densepose_keys)):
+ dp_annotations = densepose_annotations_dict[annotation_id]
+ num_densepose_annotation_used += 1
+ dp_num_points.append(len(dp_annotations['dp_I']))
+ dp_part_index.extend([int(i - _DP_PART_ID_OFFSET)
+ for i in dp_annotations['dp_I']])
+ # DensePose surface coordinates are defined on a [256, 256] grid
+ # relative to each instance box (i.e. absolute coordinates in range
+ # [0., 256.]). The following converts the coordinates
+ # so that they are expressed in normalized image coordinates.
+ dp_x_box_rel = [
+ clip_to_unit(val / 256.) for val in dp_annotations['dp_x']]
+ dp_x_norm = [(float(x) + x_box_rel * width) / image_width
+ for x_box_rel in dp_x_box_rel]
+ dp_y_box_rel = [
+ clip_to_unit(val / 256.) for val in dp_annotations['dp_y']]
+ dp_y_norm = [(float(y) + y_box_rel * height) / image_height
+ for y_box_rel in dp_y_box_rel]
+ dp_x.extend(dp_x_norm)
+ dp_y.extend(dp_y_norm)
+ dp_u.extend(dp_annotations['dp_U'])
+ dp_v.extend(dp_annotations['dp_V'])
+ else:
+ dp_num_points.append(0)
+
+ if (remove_non_person_images and
+ not any(name == b'person' for name in category_names)):
+ return (key, None, num_annotations_skipped,
+ num_keypoint_annotation_skipped, num_densepose_annotation_skipped)
feature_dict = {
'image/height':
dataset_util.int64_feature(image_height),
@@ -243,15 +338,34 @@ def create_tf_example(image,
dataset_util.bytes_list_feature(keypoints_name))
num_keypoint_annotation_skipped = (
len(keypoint_annotations_dict) - num_keypoint_annotation_used)
+ if include_densepose:
+ feature_dict['image/object/densepose/num'] = (
+ dataset_util.int64_list_feature(dp_num_points))
+ feature_dict['image/object/densepose/part_index'] = (
+ dataset_util.int64_list_feature(dp_part_index))
+ feature_dict['image/object/densepose/x'] = (
+ dataset_util.float_list_feature(dp_x))
+ feature_dict['image/object/densepose/y'] = (
+ dataset_util.float_list_feature(dp_y))
+ feature_dict['image/object/densepose/u'] = (
+ dataset_util.float_list_feature(dp_u))
+ feature_dict['image/object/densepose/v'] = (
+ dataset_util.float_list_feature(dp_v))
+ num_densepose_annotation_skipped = (
+ len(densepose_annotations_dict) - num_densepose_annotation_used)
example = tf.train.Example(features=tf.train.Features(feature=feature_dict))
- return key, example, num_annotations_skipped, num_keypoint_annotation_skipped
+ return (key, example, num_annotations_skipped,
+ num_keypoint_annotation_skipped, num_densepose_annotation_skipped)
def _create_tf_record_from_coco_annotations(annotations_file, image_dir,
output_path, include_masks,
num_shards,
- keypoint_annotations_file=''):
+ keypoint_annotations_file='',
+ densepose_annotations_file='',
+ remove_non_person_annotations=False,
+ remove_non_person_images=False):
"""Loads COCO annotation json files and converts to tf.Record format.
Args:
@@ -264,6 +378,12 @@ def _create_tf_record_from_coco_annotations(annotations_file, image_dir,
keypoint_annotations_file: JSON file containing the person keypoint
annotations. If empty, then no person keypoint annotations will be
generated.
+ densepose_annotations_file: JSON file containing the DensePose annotations.
+ If empty, then no DensePose annotations will be generated.
+ remove_non_person_annotations: Whether to remove any annotations that are
+ not the "person" class.
+ remove_non_person_images: Whether to remove any images that do not contain
+ at least one "person" annotation.
"""
with contextlib2.ExitStack() as tf_record_close_stack, \
tf.gfile.GFile(annotations_file, 'r') as fid:
@@ -288,7 +408,8 @@ def _create_tf_record_from_coco_annotations(annotations_file, image_dir,
if image_id not in annotations_index:
missing_annotation_count += 1
annotations_index[image_id] = []
- logging.info('%d images are missing annotations.', missing_annotation_count)
+ logging.info('%d images are missing annotations.',
+ missing_annotation_count)
keypoint_annotations_index = {}
if keypoint_annotations_file:
@@ -301,8 +422,20 @@ def _create_tf_record_from_coco_annotations(annotations_file, image_dir,
keypoint_annotations_index[image_id] = {}
keypoint_annotations_index[image_id][annotation['id']] = annotation
+ densepose_annotations_index = {}
+ if densepose_annotations_file:
+ with tf.gfile.GFile(densepose_annotations_file, 'r') as fid:
+ densepose_groundtruth_data = json.load(fid)
+ if 'annotations' in densepose_groundtruth_data:
+ for annotation in densepose_groundtruth_data['annotations']:
+ image_id = annotation['image_id']
+ if image_id not in densepose_annotations_index:
+ densepose_annotations_index[image_id] = {}
+ densepose_annotations_index[image_id][annotation['id']] = annotation
+
total_num_annotations_skipped = 0
total_num_keypoint_annotations_skipped = 0
+ total_num_densepose_annotations_skipped = 0
for idx, image in enumerate(images):
if idx % 100 == 0:
logging.info('On image %d of %d', idx, len(images))
@@ -312,19 +445,31 @@ def _create_tf_record_from_coco_annotations(annotations_file, image_dir,
keypoint_annotations_dict = {}
if image['id'] in keypoint_annotations_index:
keypoint_annotations_dict = keypoint_annotations_index[image['id']]
- (_, tf_example, num_annotations_skipped,
- num_keypoint_annotations_skipped) = create_tf_example(
+ densepose_annotations_dict = None
+ if densepose_annotations_file:
+ densepose_annotations_dict = {}
+ if image['id'] in densepose_annotations_index:
+ densepose_annotations_dict = densepose_annotations_index[image['id']]
+ (_, tf_example, num_annotations_skipped, num_keypoint_annotations_skipped,
+ num_densepose_annotations_skipped) = create_tf_example(
image, annotations_list, image_dir, category_index, include_masks,
- keypoint_annotations_dict)
+ keypoint_annotations_dict, densepose_annotations_dict,
+ remove_non_person_annotations, remove_non_person_images)
total_num_annotations_skipped += num_annotations_skipped
total_num_keypoint_annotations_skipped += num_keypoint_annotations_skipped
+ total_num_densepose_annotations_skipped += (
+ num_densepose_annotations_skipped)
shard_idx = idx % num_shards
- output_tfrecords[shard_idx].write(tf_example.SerializeToString())
+ if tf_example:
+ output_tfrecords[shard_idx].write(tf_example.SerializeToString())
logging.info('Finished writing, skipped %d annotations.',
total_num_annotations_skipped)
if keypoint_annotations_file:
logging.info('Finished writing, skipped %d keypoint annotations.',
total_num_keypoint_annotations_skipped)
+ if densepose_annotations_file:
+ logging.info('Finished writing, skipped %d DensePose annotations.',
+ total_num_densepose_annotations_skipped)
def main(_):
@@ -347,20 +492,26 @@ def main(_):
train_output_path,
FLAGS.include_masks,
num_shards=100,
- keypoint_annotations_file=FLAGS.train_keypoint_annotations_file)
+ keypoint_annotations_file=FLAGS.train_keypoint_annotations_file,
+ densepose_annotations_file=FLAGS.train_densepose_annotations_file,
+ remove_non_person_annotations=FLAGS.remove_non_person_annotations,
+ remove_non_person_images=FLAGS.remove_non_person_images)
_create_tf_record_from_coco_annotations(
FLAGS.val_annotations_file,
FLAGS.val_image_dir,
val_output_path,
FLAGS.include_masks,
- num_shards=100,
- keypoint_annotations_file=FLAGS.val_keypoint_annotations_file)
+ num_shards=50,
+ keypoint_annotations_file=FLAGS.val_keypoint_annotations_file,
+ densepose_annotations_file=FLAGS.val_densepose_annotations_file,
+ remove_non_person_annotations=FLAGS.remove_non_person_annotations,
+ remove_non_person_images=FLAGS.remove_non_person_images)
_create_tf_record_from_coco_annotations(
FLAGS.testdev_annotations_file,
FLAGS.test_image_dir,
testdev_output_path,
FLAGS.include_masks,
- num_shards=100)
+ num_shards=50)
if __name__ == '__main__':
diff --git a/research/object_detection/dataset_tools/create_coco_tf_record_test.py b/research/object_detection/dataset_tools/create_coco_tf_record_test.py
index 0bcc8be9c7437734414e73e43cae8effb7c95681..659142b7b7022a4243025146162eaac4b8c9f165 100644
--- a/research/object_detection/dataset_tools/create_coco_tf_record_test.py
+++ b/research/object_detection/dataset_tools/create_coco_tf_record_test.py
@@ -89,7 +89,7 @@ class CreateCocoTFRecordTest(tf.test.TestCase):
}
(_, example,
- num_annotations_skipped, _) = create_coco_tf_record.create_tf_example(
+ num_annotations_skipped, _, _) = create_coco_tf_record.create_tf_example(
image, annotations_list, image_dir, category_index)
self.assertEqual(num_annotations_skipped, 0)
@@ -156,7 +156,7 @@ class CreateCocoTFRecordTest(tf.test.TestCase):
}
(_, example,
- num_annotations_skipped, _) = create_coco_tf_record.create_tf_example(
+ num_annotations_skipped, _, _) = create_coco_tf_record.create_tf_example(
image, annotations_list, image_dir, category_index, include_masks=True)
self.assertEqual(num_annotations_skipped, 0)
@@ -259,14 +259,14 @@ class CreateCocoTFRecordTest(tf.test.TestCase):
}
}
- (_, example, _,
- num_keypoint_annotation_skipped) = create_coco_tf_record.create_tf_example(
- image,
- annotations_list,
- image_dir,
- category_index,
- include_masks=False,
- keypoint_annotations_dict=keypoint_annotations_dict)
+ _, example, _, num_keypoint_annotation_skipped, _ = (
+ create_coco_tf_record.create_tf_example(
+ image,
+ annotations_list,
+ image_dir,
+ category_index,
+ include_masks=False,
+ keypoint_annotations_dict=keypoint_annotations_dict))
self.assertEqual(num_keypoint_annotation_skipped, 0)
self._assertProtoEqual(
@@ -310,6 +310,132 @@ class CreateCocoTFRecordTest(tf.test.TestCase):
example.features.feature[
'image/object/keypoint/visibility'].int64_list.value, vv)
+ def test_create_tf_example_with_dense_pose(self):
+ image_dir = self.get_temp_dir()
+ image_file_name = 'tmp_image.jpg'
+ image_data = np.random.randint(low=0, high=256, size=(256, 256, 3)).astype(
+ np.uint8)
+ save_path = os.path.join(image_dir, image_file_name)
+ image = PIL.Image.fromarray(image_data, 'RGB')
+ image.save(save_path)
+
+ image = {
+ 'file_name': image_file_name,
+ 'height': 256,
+ 'width': 256,
+ 'id': 11,
+ }
+
+ min_x, min_y = 64, 64
+ max_x, max_y = 128, 128
+ keypoints = []
+ num_visible_keypoints = 0
+ xv = []
+ yv = []
+ vv = []
+ for _ in range(17):
+ xc = min_x + int(np.random.rand()*(max_x - min_x))
+ yc = min_y + int(np.random.rand()*(max_y - min_y))
+ vis = np.random.randint(0, 3)
+ xv.append(xc)
+ yv.append(yc)
+ vv.append(vis)
+ keypoints.extend([xc, yc, vis])
+ num_visible_keypoints += (vis > 0)
+
+ annotations_list = [{
+ 'area': 0.5,
+ 'iscrowd': False,
+ 'image_id': 11,
+ 'bbox': [64, 64, 128, 128],
+ 'category_id': 1,
+ 'id': 1000
+ }]
+
+ num_points = 45
+ dp_i = np.random.randint(1, 25, (num_points,)).astype(np.float32)
+ dp_u = np.random.randn(num_points)
+ dp_v = np.random.randn(num_points)
+ dp_x = np.random.rand(num_points)*256.
+ dp_y = np.random.rand(num_points)*256.
+ densepose_annotations_dict = {
+ 1000: {
+ 'dp_I': dp_i,
+ 'dp_U': dp_u,
+ 'dp_V': dp_v,
+ 'dp_x': dp_x,
+ 'dp_y': dp_y,
+ 'bbox': [64, 64, 128, 128],
+ }
+ }
+
+ category_index = {
+ 1: {
+ 'name': 'person',
+ 'id': 1
+ }
+ }
+
+ _, example, _, _, num_densepose_annotation_skipped = (
+ create_coco_tf_record.create_tf_example(
+ image,
+ annotations_list,
+ image_dir,
+ category_index,
+ include_masks=False,
+ densepose_annotations_dict=densepose_annotations_dict))
+
+ self.assertEqual(num_densepose_annotation_skipped, 0)
+ self._assertProtoEqual(
+ example.features.feature['image/height'].int64_list.value, [256])
+ self._assertProtoEqual(
+ example.features.feature['image/width'].int64_list.value, [256])
+ self._assertProtoEqual(
+ example.features.feature['image/filename'].bytes_list.value,
+ [six.b(image_file_name)])
+ self._assertProtoEqual(
+ example.features.feature['image/source_id'].bytes_list.value,
+ [six.b(str(image['id']))])
+ self._assertProtoEqual(
+ example.features.feature['image/format'].bytes_list.value,
+ [six.b('jpeg')])
+ self._assertProtoEqual(
+ example.features.feature['image/object/bbox/xmin'].float_list.value,
+ [0.25])
+ self._assertProtoEqual(
+ example.features.feature['image/object/bbox/ymin'].float_list.value,
+ [0.25])
+ self._assertProtoEqual(
+ example.features.feature['image/object/bbox/xmax'].float_list.value,
+ [0.75])
+ self._assertProtoEqual(
+ example.features.feature['image/object/bbox/ymax'].float_list.value,
+ [0.75])
+ self._assertProtoEqual(
+ example.features.feature['image/object/class/text'].bytes_list.value,
+ [six.b('person')])
+ self._assertProtoEqual(
+ example.features.feature['image/object/densepose/num'].int64_list.value,
+ [num_points])
+ self.assertAllEqual(
+ example.features.feature[
+ 'image/object/densepose/part_index'].int64_list.value,
+ dp_i.astype(np.int64) - create_coco_tf_record._DP_PART_ID_OFFSET)
+ self.assertAllClose(
+ example.features.feature['image/object/densepose/u'].float_list.value,
+ dp_u)
+ self.assertAllClose(
+ example.features.feature['image/object/densepose/v'].float_list.value,
+ dp_v)
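+    # dp_x/dp_y live on a [0, 256] grid relative to the instance box
+    # [64, 64, 128, 128], so the expected normalized image coordinates are
+    # (box_origin + relative_coord * box_size / 256) / image_size.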
+ expected_dp_x = (64 + dp_x * 128. / 256.) / 256.
+ expected_dp_y = (64 + dp_y * 128. / 256.) / 256.
+ self.assertAllClose(
+ example.features.feature['image/object/densepose/x'].float_list.value,
+ expected_dp_x)
+ self.assertAllClose(
+ example.features.feature['image/object/densepose/y'].float_list.value,
+ expected_dp_y)
+
def test_create_sharded_tf_record(self):
tmp_dir = self.get_temp_dir()
image_paths = ['tmp1_image.jpg', 'tmp2_image.jpg']
diff --git a/research/object_detection/dataset_tools/seq_example_util_test.py b/research/object_detection/dataset_tools/seq_example_util_test.py
index 81fd4f54fc4ceec442b0962b0d11a0cfdcf5623d..fd721954be896b4044735dd67928044e413422e7 100644
--- a/research/object_detection/dataset_tools/seq_example_util_test.py
+++ b/research/object_detection/dataset_tools/seq_example_util_test.py
@@ -24,10 +24,18 @@ import six
import tensorflow.compat.v1 as tf
from object_detection.dataset_tools import seq_example_util
+from object_detection.utils import tf_version
class SeqExampleUtilTest(tf.test.TestCase):
+ def materialize_tensors(self, list_of_tensors):
+ if tf_version.is_tf2():
+ return [tensor.numpy() for tensor in list_of_tensors]
+ else:
+ with self.cached_session() as sess:
+ return sess.run(list_of_tensors)
+
def test_make_unlabeled_example(self):
num_frames = 5
image_height = 100
@@ -41,8 +49,7 @@ class SeqExampleUtilTest(tf.test.TestCase):
image_source_ids = [str(idx) for idx in range(num_frames)]
images_list = tf.unstack(images, axis=0)
encoded_images_list = [tf.io.encode_jpeg(image) for image in images_list]
- with tf.Session() as sess:
- encoded_images = sess.run(encoded_images_list)
+ encoded_images = self.materialize_tensors(encoded_images_list)
seq_example = seq_example_util.make_sequence_example(
dataset_name=dataset_name,
video_id=video_id,
@@ -109,8 +116,7 @@ class SeqExampleUtilTest(tf.test.TestCase):
dtype=tf.int32), dtype=tf.uint8)
images_list = tf.unstack(images, axis=0)
encoded_images_list = [tf.io.encode_jpeg(image) for image in images_list]
- with tf.Session() as sess:
- encoded_images = sess.run(encoded_images_list)
+ encoded_images = self.materialize_tensors(encoded_images_list)
timestamps = [100000, 110000]
is_annotated = [1, 0]
bboxes = [
@@ -208,8 +214,7 @@ class SeqExampleUtilTest(tf.test.TestCase):
dtype=tf.int32), dtype=tf.uint8)
images_list = tf.unstack(images, axis=0)
encoded_images_list = [tf.io.encode_jpeg(image) for image in images_list]
- with tf.Session() as sess:
- encoded_images = sess.run(encoded_images_list)
+ encoded_images = self.materialize_tensors(encoded_images_list)
bboxes = [
np.array([[0., 0., 0.75, 0.75],
[0., 0., 1., 1.]], dtype=np.float32),
@@ -283,7 +288,7 @@ class SeqExampleUtilTest(tf.test.TestCase):
[0.75, 1.],
seq_feature_dict['region/bbox/xmax'].feature[0].float_list.value[:])
self.assertAllEqual(
- ['cat', 'frog'],
+ [b'cat', b'frog'],
seq_feature_dict['region/label/string'].feature[0].bytes_list.value[:])
self.assertAllClose(
[0.],
@@ -327,7 +332,7 @@ class SeqExampleUtilTest(tf.test.TestCase):
[0.75],
seq_feature_dict['region/bbox/xmax'].feature[1].float_list.value[:])
self.assertAllEqual(
- ['cat'],
+ [b'cat'],
seq_feature_dict['region/label/string'].feature[1].bytes_list.value[:])
self.assertAllClose(
[],
diff --git a/research/object_detection/dataset_tools/tf_record_creation_util_test.py b/research/object_detection/dataset_tools/tf_record_creation_util_test.py
index 2873a6d146fbdb8ae62c558abe8f62e76943b515..5722c86472e617f5e2e2aba916ad9e90c418948b 100644
--- a/research/object_detection/dataset_tools/tf_record_creation_util_test.py
+++ b/research/object_detection/dataset_tools/tf_record_creation_util_test.py
@@ -42,7 +42,7 @@ class OpenOutputTfrecordsTests(tf.test.TestCase):
tf_record_path = '{}-{:05d}-of-00010'.format(
os.path.join(tf.test.get_temp_dir(), 'test.tfrec'), idx)
records = list(tf.python_io.tf_record_iterator(tf_record_path))
- self.assertAllEqual(records, ['test_{}'.format(idx)])
+ self.assertAllEqual(records, ['test_{}'.format(idx).encode('utf-8')])
if __name__ == '__main__':
diff --git a/research/object_detection/dockerfiles/tf1/Dockerfile b/research/object_detection/dockerfiles/tf1/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..76c0c1f07866557776a329cf215cf046c043ebb3
--- /dev/null
+++ b/research/object_detection/dockerfiles/tf1/Dockerfile
@@ -0,0 +1,44 @@
+FROM tensorflow/tensorflow:1.15.2-gpu-py3
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Install apt dependencies
+RUN apt-get update && apt-get install -y \
+ git \
+ gpg-agent \
+ python3-cairocffi \
+ protobuf-compiler \
+ python3-pil \
+ python3-lxml \
+ python3-tk \
+ wget
+
+# Install gcloud and gsutil commands
+# https://cloud.google.com/sdk/docs/quickstart-debian-ubuntu
+RUN export CLOUD_SDK_REPO="cloud-sdk-$(lsb_release -c -s)" && \
+ echo "deb http://packages.cloud.google.com/apt $CLOUD_SDK_REPO main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \
+ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \
+ apt-get update -y && apt-get install google-cloud-sdk -y
+
+# Add new user to avoid running as root
+RUN useradd -ms /bin/bash tensorflow
+USER tensorflow
+WORKDIR /home/tensorflow
+
+# Install pip dependencies
+RUN pip3 install --user absl-py
+RUN pip3 install --user contextlib2
+RUN pip3 install --user Cython
+RUN pip3 install --user jupyter
+RUN pip3 install --user matplotlib
+RUN pip3 install --user pycocotools
+RUN pip3 install --user tf-slim
+
+# Copy this version of the model garden into the image
+COPY --chown=tensorflow . /home/tensorflow/models
+
+# Compile protobuf configs
+RUN (cd /home/tensorflow/models/research/ && protoc object_detection/protos/*.proto --python_out=.)
+
+ENV PYTHONPATH $PYTHONPATH:/home/tensorflow/models/research/:/home/tensorflow/models/research/slim
+ENV TF_CPP_MIN_LOG_LEVEL 3
diff --git a/research/object_detection/dockerfiles/tf1/README.md b/research/object_detection/dockerfiles/tf1/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..5e3e6d052daff9bd2ca6a0e6de2e118ee5f29417
--- /dev/null
+++ b/research/object_detection/dockerfiles/tf1/README.md
@@ -0,0 +1,11 @@
+# Tensorflow Object Detection on Docker
+
+These instructions are experimental.
+
+## Building and running:
+
+```bash
+# From the root of the git repository
+docker build -f research/object_detection/dockerfiles/tf1/Dockerfile -t od .
+docker run -it od
+```
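+
+If the host has GPUs available, the container can also be started with GPU
+access (this assumes Docker 19.03+ with the NVIDIA container toolkit
+installed):
+
+```bash
+docker run --gpus all -it od
+```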
diff --git a/research/object_detection/dockerfiles/tf2/Dockerfile b/research/object_detection/dockerfiles/tf2/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..c3d5e2e8f7c08d94857f8e25f15ed43710cba76e
--- /dev/null
+++ b/research/object_detection/dockerfiles/tf2/Dockerfile
@@ -0,0 +1,44 @@
+FROM tensorflow/tensorflow:2.2.0-gpu
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Install apt dependencies
+RUN apt-get update && apt-get install -y \
+ git \
+ gpg-agent \
+ python3-cairocffi \
+ protobuf-compiler \
+ python3-pil \
+ python3-lxml \
+ python3-tk \
+ wget
+
+# Install gcloud and gsutil commands
+# https://cloud.google.com/sdk/docs/quickstart-debian-ubuntu
+RUN export CLOUD_SDK_REPO="cloud-sdk-$(lsb_release -c -s)" && \
+ echo "deb http://packages.cloud.google.com/apt $CLOUD_SDK_REPO main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \
+ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \
+ apt-get update -y && apt-get install google-cloud-sdk -y
+
+# Add new user to avoid running as root
+RUN useradd -ms /bin/bash tensorflow
+USER tensorflow
+WORKDIR /home/tensorflow
+
+# Install pip dependencies
+RUN pip3 install --user absl-py
+RUN pip3 install --user contextlib2
+RUN pip3 install --user Cython
+RUN pip3 install --user jupyter
+RUN pip3 install --user matplotlib
+RUN pip3 install --user pycocotools
+RUN pip3 install --user tf-slim
+
+# Copy this version of the model garden into the image
+COPY --chown=tensorflow . /home/tensorflow/models
+
+# Compile protobuf configs
+RUN (cd /home/tensorflow/models/research/ && protoc object_detection/protos/*.proto --python_out=.)
+
+ENV PYTHONPATH $PYTHONPATH:/home/tensorflow/models/research/:/home/tensorflow/models/research/slim
+ENV TF_CPP_MIN_LOG_LEVEL 3
diff --git a/research/object_detection/dockerfiles/tf2/README.md b/research/object_detection/dockerfiles/tf2/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..58b58db85769f826842e378ca1054cb6f3e392e8
--- /dev/null
+++ b/research/object_detection/dockerfiles/tf2/README.md
@@ -0,0 +1,11 @@
+# Tensorflow Object Detection on Docker
+
+These instructions are experimental.
+
+## Building and running:
+
+```bash
+# From the root of the git repository
+docker build -f research/object_detection/dockerfiles/tf2/Dockerfile -t od .
+docker run -it od
+```
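+
+To use GPUs inside the container, the host needs the NVIDIA container toolkit
+and Docker 19.03+. The command below is a sketch, assuming a local `data/`
+directory is mounted into the container:
+
+```bash
+# Hypothetical example: expose all GPUs and mount a local dataset directory
+docker run --gpus all -it -v $(pwd)/data:/home/tensorflow/data od
+```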
diff --git a/research/object_detection/eval_util.py b/research/object_detection/eval_util.py
index e2d1255b5ae99fc49eb66e620e09e6a61fbcda6e..3b365df19a8093ad2c2a2ad39b8dd46f6d1a82c7 100644
--- a/research/object_detection/eval_util.py
+++ b/research/object_detection/eval_util.py
@@ -52,6 +52,8 @@ EVAL_METRICS_CLASS_DICT = {
coco_evaluation.CocoKeypointEvaluator,
'coco_mask_metrics':
coco_evaluation.CocoMaskEvaluator,
+ 'coco_panoptic_metrics':
+ coco_evaluation.CocoPanopticSegmentationEvaluator,
'oid_challenge_detection_metrics':
object_detection_evaluation.OpenImagesDetectionChallengeEvaluator,
'oid_challenge_segmentation_metrics':
diff --git a/research/object_detection/eval_util_test.py b/research/object_detection/eval_util_test.py
index f2f66405f7236d0806ccdeb55e84553fb20a2ee4..d0623f1fcda50482ee98eccb2e2e62ef10b88be3 100644
--- a/research/object_detection/eval_util_test.py
+++ b/research/object_detection/eval_util_test.py
@@ -18,6 +18,7 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+import unittest
from absl.testing import parameterized
import numpy as np
@@ -30,6 +31,7 @@ from object_detection.core import standard_fields as fields
from object_detection.metrics import coco_evaluation
from object_detection.protos import eval_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
class EvalUtilTest(test_case.TestCase, parameterized.TestCase):
@@ -127,6 +129,7 @@ class EvalUtilTest(test_case.TestCase, parameterized.TestCase):
{'batch_size': 1, 'max_gt_boxes': None, 'scale_to_absolute': False},
{'batch_size': 8, 'max_gt_boxes': [1], 'scale_to_absolute': False}
)
+ @unittest.skipIf(tf_version.is_tf2(), 'Only compatible with TF1.X')
def test_get_eval_metric_ops_for_coco_detections(self, batch_size=1,
max_gt_boxes=None,
scale_to_absolute=False):
@@ -155,6 +158,7 @@ class EvalUtilTest(test_case.TestCase, parameterized.TestCase):
{'batch_size': 1, 'max_gt_boxes': None, 'scale_to_absolute': False},
{'batch_size': 8, 'max_gt_boxes': [1], 'scale_to_absolute': False}
)
+ @unittest.skipIf(tf_version.is_tf2(), 'Only compatible with TF1.X')
def test_get_eval_metric_ops_for_coco_detections_and_masks(
self, batch_size=1, max_gt_boxes=None, scale_to_absolute=False):
eval_config = eval_pb2.EvalConfig()
@@ -185,6 +189,7 @@ class EvalUtilTest(test_case.TestCase, parameterized.TestCase):
{'batch_size': 1, 'max_gt_boxes': None, 'scale_to_absolute': False},
{'batch_size': 8, 'max_gt_boxes': [1], 'scale_to_absolute': False}
)
+ @unittest.skipIf(tf_version.is_tf2(), 'Only compatible with TF1.X')
def test_get_eval_metric_ops_for_coco_detections_and_resized_masks(
self, batch_size=1, max_gt_boxes=None, scale_to_absolute=False):
eval_config = eval_pb2.EvalConfig()
@@ -210,6 +215,7 @@ class EvalUtilTest(test_case.TestCase, parameterized.TestCase):
self.assertAlmostEqual(1.0, metrics['DetectionBoxes_Precision/mAP'])
self.assertAlmostEqual(1.0, metrics['DetectionMasks_Precision/mAP'])
+ @unittest.skipIf(tf_version.is_tf2(), 'Only compatible with TF1.X')
def test_get_eval_metric_ops_raises_error_with_unsupported_metric(self):
eval_config = eval_pb2.EvalConfig()
eval_config.metrics_set.extend(['unsupported_metric'])
@@ -334,63 +340,67 @@ class EvalUtilTest(test_case.TestCase, parameterized.TestCase):
dtype=np.float32)
detection_keypoints = np.array([[0.0, 0.0], [0.5, 0.5], [1.0, 1.0]],
dtype=np.float32)
- detections = {
- detection_fields.detection_boxes:
- tf.constant(detection_boxes),
- detection_fields.detection_scores:
- tf.constant([[1.], [1.]]),
- detection_fields.detection_classes:
- tf.constant([[1], [2]]),
- detection_fields.num_detections:
- tf.constant([1, 1]),
- detection_fields.detection_keypoints:
- tf.tile(
- tf.reshape(
- tf.constant(detection_keypoints), shape=[1, 1, 3, 2]),
- multiples=[2, 1, 1, 1])
- }
-
- gt_boxes = detection_boxes
- groundtruth = {
- input_data_fields.groundtruth_boxes:
- tf.constant(gt_boxes),
- input_data_fields.groundtruth_classes:
- tf.constant([[1.], [1.]]),
- input_data_fields.groundtruth_keypoints:
- tf.tile(
- tf.reshape(
- tf.constant(detection_keypoints), shape=[1, 1, 3, 2]),
- multiples=[2, 1, 1, 1])
- }
-
- image = tf.zeros((2, 100, 100, 3), dtype=tf.float32)
-
- true_image_shapes = tf.constant([[100, 100, 3], [50, 100, 3]])
- original_image_spatial_shapes = tf.constant([[200, 200], [150, 300]])
-
- result = eval_util.result_dict_for_batched_example(
- image, key, detections, groundtruth,
- scale_to_absolute=True,
- true_image_shapes=true_image_shapes,
- original_image_spatial_shapes=original_image_spatial_shapes,
- max_gt_boxes=tf.constant(1))
-
- with self.test_session() as sess:
- result = sess.run(result)
- self.assertAllEqual(
- [[[0., 0., 200., 200.]], [[0.0, 0.0, 150., 150.]]],
- result[input_data_fields.groundtruth_boxes])
- self.assertAllClose([[[[0., 0.], [100., 100.], [200., 200.]]],
- [[[0., 0.], [150., 150.], [300., 300.]]]],
- result[input_data_fields.groundtruth_keypoints])
-
- # Predictions from the model are not scaled.
- self.assertAllEqual(
- [[[0., 0., 200., 200.]], [[0.0, 0.0, 75., 150.]]],
- result[detection_fields.detection_boxes])
- self.assertAllClose([[[[0., 0.], [100., 100.], [200., 200.]]],
- [[[0., 0.], [75., 150.], [150., 300.]]]],
- result[detection_fields.detection_keypoints])
+ def graph_fn():
+ detections = {
+ detection_fields.detection_boxes:
+ tf.constant(detection_boxes),
+ detection_fields.detection_scores:
+ tf.constant([[1.], [1.]]),
+ detection_fields.detection_classes:
+ tf.constant([[1], [2]]),
+ detection_fields.num_detections:
+ tf.constant([1, 1]),
+ detection_fields.detection_keypoints:
+ tf.tile(
+ tf.reshape(
+ tf.constant(detection_keypoints), shape=[1, 1, 3, 2]),
+ multiples=[2, 1, 1, 1])
+ }
+
+ gt_boxes = detection_boxes
+ groundtruth = {
+ input_data_fields.groundtruth_boxes:
+ tf.constant(gt_boxes),
+ input_data_fields.groundtruth_classes:
+ tf.constant([[1.], [1.]]),
+ input_data_fields.groundtruth_keypoints:
+ tf.tile(
+ tf.reshape(
+ tf.constant(detection_keypoints), shape=[1, 1, 3, 2]),
+ multiples=[2, 1, 1, 1])
+ }
+
+ image = tf.zeros((2, 100, 100, 3), dtype=tf.float32)
+
+ true_image_shapes = tf.constant([[100, 100, 3], [50, 100, 3]])
+ original_image_spatial_shapes = tf.constant([[200, 200], [150, 300]])
+
+ result = eval_util.result_dict_for_batched_example(
+ image, key, detections, groundtruth,
+ scale_to_absolute=True,
+ true_image_shapes=true_image_shapes,
+ original_image_spatial_shapes=original_image_spatial_shapes,
+ max_gt_boxes=tf.constant(1))
+ return (result[input_data_fields.groundtruth_boxes],
+ result[input_data_fields.groundtruth_keypoints],
+ result[detection_fields.detection_boxes],
+ result[detection_fields.detection_keypoints])
+ (gt_boxes, gt_keypoints, detection_boxes,
+ detection_keypoints) = self.execute_cpu(graph_fn, [])
+ self.assertAllEqual(
+ [[[0., 0., 200., 200.]], [[0.0, 0.0, 150., 150.]]],
+ gt_boxes)
+ self.assertAllClose([[[[0., 0.], [100., 100.], [200., 200.]]],
+ [[[0., 0.], [150., 150.], [300., 300.]]]],
+ gt_keypoints)
+
+ # Predictions from the model are not scaled.
+ self.assertAllEqual(
+ [[[0., 0., 200., 200.]], [[0.0, 0.0, 75., 150.]]],
+ detection_boxes)
+ self.assertAllClose([[[[0., 0.], [100., 100.], [200., 200.]]],
+ [[[0., 0.], [75., 150.], [150., 300.]]]],
+ detection_keypoints)
if __name__ == '__main__':
diff --git a/research/object_detection/export_inference_graph.py b/research/object_detection/export_inference_graph.py
index bcb5c40b3b8c2b3bbfc95c00baa348889954dbb8..5a0ee0dde056afacca9a876c7456cb82a82f3192 100644
--- a/research/object_detection/export_inference_graph.py
+++ b/research/object_detection/export_inference_graph.py
@@ -134,6 +134,30 @@ flags.DEFINE_string('config_override', '',
'text proto to override pipeline_config_path.')
flags.DEFINE_boolean('write_inference_graph', False,
'If true, writes inference graph to disk.')
+flags.DEFINE_string('additional_output_tensor_names', None,
+ 'Additional Tensors to output, to be specified as a comma '
+ 'separated list of tensor names.')
+flags.DEFINE_boolean('use_side_inputs', False,
+ 'If True, uses side inputs as well as image inputs.')
+flags.DEFINE_string('side_input_shapes', None,
+ 'If use_side_inputs is True, this explicitly sets '
+ 'the shape of the side input tensors to a fixed size. The '
+ 'dimensions are to be provided as a comma-separated list '
+ 'of integers. A value of -1 can be used for unknown '
+ 'dimensions. A `/` denotes a break, starting the shape of '
+ 'the next side input tensor. This flag is required if '
+ 'using side inputs.')
+flags.DEFINE_string('side_input_types', None,
+                    'If use_side_inputs is True, this explicitly sets '
+                    'the types of the side input tensors. The types are to '
+                    'be provided as a comma-separated list, each of '
+                    '`string`, `int`, or `float`. This flag is required if '
+                    'using side inputs.')
+flags.DEFINE_string('side_input_names', None,
+ 'If use_side_inputs is True, this explicitly sets '
+ 'the names of the side input tensors required by the model '
+ 'assuming the names will be a comma-separated list of '
+ 'strings. This flag is required if using side inputs.')
tf.app.flags.mark_flag_as_required('pipeline_config_path')
tf.app.flags.mark_flag_as_required('trained_checkpoint_prefix')
tf.app.flags.mark_flag_as_required('output_directory')
@@ -152,10 +176,30 @@ def main(_):
]
else:
input_shape = None
+ if FLAGS.use_side_inputs:
+ side_input_shapes, side_input_names, side_input_types = (
+ exporter.parse_side_inputs(
+ FLAGS.side_input_shapes,
+ FLAGS.side_input_names,
+ FLAGS.side_input_types))
+ else:
+ side_input_shapes = None
+ side_input_names = None
+ side_input_types = None
+ if FLAGS.additional_output_tensor_names:
+ additional_output_tensor_names = list(
+ FLAGS.additional_output_tensor_names.split(','))
+ else:
+ additional_output_tensor_names = None
exporter.export_inference_graph(
FLAGS.input_type, pipeline_config, FLAGS.trained_checkpoint_prefix,
FLAGS.output_directory, input_shape=input_shape,
- write_inference_graph=FLAGS.write_inference_graph)
+ write_inference_graph=FLAGS.write_inference_graph,
+ additional_output_tensor_names=additional_output_tensor_names,
+ use_side_inputs=FLAGS.use_side_inputs,
+ side_input_shapes=side_input_shapes,
+ side_input_names=side_input_names,
+ side_input_types=side_input_types)
if __name__ == '__main__':
diff --git a/research/object_detection/export_tflite_ssd_graph_lib.py b/research/object_detection/export_tflite_ssd_graph_lib.py
index 229daab00a2ea2288ce854c508eb4ea48f63bacc..f72e9525bfd75b58c874cba5b790cbac710cb9dd 100644
--- a/research/object_detection/export_tflite_ssd_graph_lib.py
+++ b/research/object_detection/export_tflite_ssd_graph_lib.py
@@ -24,16 +24,19 @@ import tensorflow.compat.v1 as tf
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.framework import types_pb2
from tensorflow.core.protobuf import saver_pb2
-from tensorflow.tools.graph_transforms import TransformGraph
from object_detection import exporter
from object_detection.builders import graph_rewriter_builder
from object_detection.builders import model_builder
from object_detection.builders import post_processing_builder
from object_detection.core import box_list
+from object_detection.utils import tf_version
_DEFAULT_NUM_CHANNELS = 3
_DEFAULT_NUM_COORD_BOX = 4
+if tf_version.is_tf1():
+ from tensorflow.tools.graph_transforms import TransformGraph # pylint: disable=g-import-not-at-top
+
def get_const_center_size_encoded_anchors(anchors):
"""Exports center-size encoded anchors as a constant tensor.
diff --git a/research/object_detection/export_tflite_ssd_graph_lib_test.py b/research/object_detection/export_tflite_ssd_graph_lib_tf1_test.py
similarity index 98%
rename from research/object_detection/export_tflite_ssd_graph_lib_test.py
rename to research/object_detection/export_tflite_ssd_graph_lib_tf1_test.py
index 5b6082d109c7824651c0cfdce95e41a5126fa653..3625b9f651c157f52f690b1c9adf1e7ce19f2b94 100644
--- a/research/object_detection/export_tflite_ssd_graph_lib_test.py
+++ b/research/object_detection/export_tflite_ssd_graph_lib_tf1_test.py
@@ -18,6 +18,7 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
+import unittest
import numpy as np
import six
import tensorflow.compat.v1 as tf
@@ -32,6 +33,7 @@ from object_detection.core import model
from object_detection.protos import graph_rewriter_pb2
from object_detection.protos import pipeline_pb2
from object_detection.protos import post_processing_pb2
+from object_detection.utils import tf_version
# pylint: disable=g-import-not-at-top
@@ -72,6 +74,9 @@ class FakeModel(model.DetectionModel):
def restore_map(self, checkpoint_path, from_detection_checkpoint):
pass
+ def restore_from_objects(self, fine_tune_checkpoint_type):
+ pass
+
def loss(self, prediction_dict, true_image_shapes):
pass
@@ -82,6 +87,7 @@ class FakeModel(model.DetectionModel):
pass
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class ExportTfliteGraphTest(tf.test.TestCase):
def _save_checkpoint_from_mock_model(self,
@@ -413,7 +419,7 @@ class ExportTfliteGraphTest(tf.test.TestCase):
tflite_graph_file = self._export_graph_with_postprocessing_op(
pipeline_config)
self.assertTrue(os.path.exists(tflite_graph_file))
- mock_get.assert_called_once()
+ self.assertEqual(1, mock_get.call_count)
if __name__ == '__main__':
diff --git a/research/object_detection/exporter.py b/research/object_detection/exporter.py
index 676e34debea179e0c772bc392362f1a43b24bc75..61c5f7f22db46c88c8bc5c1803b281da4c020967 100644
--- a/research/object_detection/exporter.py
+++ b/research/object_detection/exporter.py
@@ -39,6 +39,54 @@ except ImportError:
freeze_graph_with_def_protos = freeze_graph.freeze_graph_with_def_protos
+def parse_side_inputs(side_input_shapes_string, side_input_names_string,
+ side_input_types_string):
+ """Parses side input flags.
+
+ Args:
+ side_input_shapes_string: The shape of the side input tensors, provided as a
+ comma-separated list of integers. A value of -1 is used for unknown
+ dimensions. A `/` denotes a break, starting the shape of the next side
+ input tensor.
+ side_input_names_string: The names of the side input tensors, provided as a
+ comma-separated list of strings.
+    side_input_types_string: The types of the side input tensors, provided as
+      a comma-separated list of types, each of `string`, `int`, or `float`.
+
+ Returns:
+ side_input_shapes: A list of shapes.
+ side_input_names: A list of strings.
+ side_input_types: A list of tensorflow dtypes.
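+
+  Example (illustrative, matching the Context R-CNN export walkthrough in
+    g3doc/context_rcnn.md):
+    parse_side_inputs('1,2000,2057/1',
+                      'context_features,valid_context_size',
+                      'float,int')
+    # -> ([[1, 2000, 2057], [1]],
+    #     ['context_features', 'valid_context_size'],
+    #     [tf.float32, tf.int32])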
+
+ """
+ if side_input_shapes_string:
+ side_input_shapes = []
+ for side_input_shape_list in side_input_shapes_string.split('/'):
+ side_input_shape = [
+ int(dim) if dim != '-1' else None
+ for dim in side_input_shape_list.split(',')
+ ]
+ side_input_shapes.append(side_input_shape)
+ else:
+ raise ValueError('When using side_inputs, side_input_shapes must be '
+ 'specified in the input flags.')
+ if side_input_names_string:
+ side_input_names = list(side_input_names_string.split(','))
+ else:
+ raise ValueError('When using side_inputs, side_input_names must be '
+ 'specified in the input flags.')
+ if side_input_types_string:
+ typelookup = {'float': tf.float32, 'int': tf.int32, 'string': tf.string}
+ side_input_types = [
+ typelookup[side_input_type]
+ for side_input_type in side_input_types_string.split(',')
+ ]
+ else:
+ raise ValueError('When using side_inputs, side_input_types must be '
+ 'specified in the input flags.')
+ return side_input_shapes, side_input_names, side_input_types
+
+
def rewrite_nn_resize_op(is_quantized=False):
"""Replaces a custom nearest-neighbor resize op with the Tensorflow version.
@@ -140,6 +188,14 @@ def _image_tensor_input_placeholder(input_shape=None):
return input_tensor, input_tensor
+def _side_input_tensor_placeholder(side_input_shape, side_input_name,
+ side_input_type):
+ """Returns side input placeholder and side input tensor."""
+ side_input_tensor = tf.placeholder(
+ dtype=side_input_type, shape=side_input_shape, name=side_input_name)
+ return side_input_tensor, side_input_tensor
+
+
def _tf_example_input_placeholder(input_shape=None):
"""Returns input that accepts a batch of strings with tf examples.
@@ -200,7 +256,7 @@ input_placeholder_fn_map = {
'image_tensor': _image_tensor_input_placeholder,
'encoded_image_string_tensor':
_encoded_image_string_tensor_input_placeholder,
- 'tf_example': _tf_example_input_placeholder,
+ 'tf_example': _tf_example_input_placeholder
}
@@ -312,7 +368,7 @@ def write_saved_model(saved_model_path,
Args:
saved_model_path: Path to write SavedModel.
frozen_graph_def: tf.GraphDef holding frozen graph.
- inputs: The input placeholder tensor.
+ inputs: A tensor dictionary containing the inputs to a DetectionModel.
outputs: A tensor dictionary containing the outputs of a DetectionModel.
"""
with tf.Graph().as_default():
@@ -322,8 +378,13 @@ def write_saved_model(saved_model_path,
builder = tf.saved_model.builder.SavedModelBuilder(saved_model_path)
- tensor_info_inputs = {
- 'inputs': tf.saved_model.utils.build_tensor_info(inputs)}
+ tensor_info_inputs = {}
+ if isinstance(inputs, dict):
+ for k, v in inputs.items():
+ tensor_info_inputs[k] = tf.saved_model.utils.build_tensor_info(v)
+ else:
+ tensor_info_inputs['inputs'] = tf.saved_model.utils.build_tensor_info(
+ inputs)
tensor_info_outputs = {}
for k, v in outputs.items():
tensor_info_outputs[k] = tf.saved_model.utils.build_tensor_info(v)
@@ -364,11 +425,11 @@ def write_graph_and_checkpoint(inference_graph_def,
def _get_outputs_from_inputs(input_tensors, detection_model,
- output_collection_name):
+ output_collection_name, **side_inputs):
inputs = tf.cast(input_tensors, dtype=tf.float32)
preprocessed_inputs, true_image_shapes = detection_model.preprocess(inputs)
output_tensors = detection_model.predict(
- preprocessed_inputs, true_image_shapes)
+ preprocessed_inputs, true_image_shapes, **side_inputs)
postprocessed_tensors = detection_model.postprocess(
output_tensors, true_image_shapes)
return add_output_tensor_nodes(postprocessed_tensors,
@@ -376,32 +437,45 @@ def _get_outputs_from_inputs(input_tensors, detection_model,
def build_detection_graph(input_type, detection_model, input_shape,
- output_collection_name, graph_hook_fn):
+ output_collection_name, graph_hook_fn,
+ use_side_inputs=False, side_input_shapes=None,
+ side_input_names=None, side_input_types=None):
"""Build the detection graph."""
if input_type not in input_placeholder_fn_map:
raise ValueError('Unknown input type: {}'.format(input_type))
placeholder_args = {}
+ side_inputs = {}
if input_shape is not None:
if (input_type != 'image_tensor' and
input_type != 'encoded_image_string_tensor' and
- input_type != 'tf_example'):
+ input_type != 'tf_example' and
+ input_type != 'tf_sequence_example'):
raise ValueError('Can only specify input shape for `image_tensor`, '
- '`encoded_image_string_tensor`, or `tf_example` '
- 'inputs.')
+ '`encoded_image_string_tensor`, `tf_example`, '
+ ' or `tf_sequence_example` inputs.')
placeholder_args['input_shape'] = input_shape
placeholder_tensor, input_tensors = input_placeholder_fn_map[input_type](
**placeholder_args)
+ placeholder_tensors = {'inputs': placeholder_tensor}
+ if use_side_inputs:
+ for idx, side_input_name in enumerate(side_input_names):
+ side_input_placeholder, side_input = _side_input_tensor_placeholder(
+ side_input_shapes[idx], side_input_name, side_input_types[idx])
+ print(side_input)
+ side_inputs[side_input_name] = side_input
+ placeholder_tensors[side_input_name] = side_input_placeholder
outputs = _get_outputs_from_inputs(
input_tensors=input_tensors,
detection_model=detection_model,
- output_collection_name=output_collection_name)
+ output_collection_name=output_collection_name,
+ **side_inputs)
# Add global step to the graph.
slim.get_or_create_global_step()
if graph_hook_fn: graph_hook_fn()
- return outputs, placeholder_tensor
+ return outputs, placeholder_tensors
def _export_inference_graph(input_type,
@@ -414,7 +488,11 @@ def _export_inference_graph(input_type,
output_collection_name='inference_op',
graph_hook_fn=None,
write_inference_graph=False,
- temp_checkpoint_prefix=''):
+ temp_checkpoint_prefix='',
+ use_side_inputs=False,
+ side_input_shapes=None,
+ side_input_names=None,
+ side_input_types=None):
"""Export helper."""
tf.gfile.MakeDirs(output_directory)
frozen_graph_path = os.path.join(output_directory,
@@ -422,12 +500,16 @@ def _export_inference_graph(input_type,
saved_model_path = os.path.join(output_directory, 'saved_model')
model_path = os.path.join(output_directory, 'model.ckpt')
- outputs, placeholder_tensor = build_detection_graph(
+ outputs, placeholder_tensor_dict = build_detection_graph(
input_type=input_type,
detection_model=detection_model,
input_shape=input_shape,
output_collection_name=output_collection_name,
- graph_hook_fn=graph_hook_fn)
+ graph_hook_fn=graph_hook_fn,
+ use_side_inputs=use_side_inputs,
+ side_input_shapes=side_input_shapes,
+ side_input_names=side_input_names,
+ side_input_types=side_input_types)
profile_inference_graph(tf.get_default_graph())
saver_kwargs = {}
@@ -464,7 +546,8 @@ def _export_inference_graph(input_type,
f.write(str(inference_graph_def))
if additional_output_tensor_names is not None:
- output_node_names = ','.join(outputs.keys()+additional_output_tensor_names)
+ output_node_names = ','.join(list(outputs.keys())+(
+ additional_output_tensor_names))
else:
output_node_names = ','.join(outputs.keys())
@@ -480,7 +563,7 @@ def _export_inference_graph(input_type,
initializer_nodes='')
write_saved_model(saved_model_path, frozen_graph_def,
- placeholder_tensor, outputs)
+ placeholder_tensor_dict, outputs)
def export_inference_graph(input_type,
@@ -490,7 +573,11 @@ def export_inference_graph(input_type,
input_shape=None,
output_collection_name='inference_op',
additional_output_tensor_names=None,
- write_inference_graph=False):
+ write_inference_graph=False,
+ use_side_inputs=False,
+ side_input_shapes=None,
+ side_input_names=None,
+ side_input_types=None):
"""Exports inference graph for the model specified in the pipeline config.
Args:
@@ -506,6 +593,13 @@ def export_inference_graph(input_type,
additional_output_tensor_names: list of additional output
tensors to include in the frozen graph.
write_inference_graph: If true, writes inference graph to disk.
+ use_side_inputs: If True, the model requires side_inputs.
+ side_input_shapes: List of shapes of the side input tensors,
+ required if use_side_inputs is True.
+ side_input_names: List of names of the side input tensors,
+ required if use_side_inputs is True.
+ side_input_types: List of types of the side input tensors,
+ required if use_side_inputs is True.
"""
detection_model = model_builder.build(pipeline_config.model,
is_training=False)
@@ -524,7 +618,11 @@ def export_inference_graph(input_type,
input_shape,
output_collection_name,
graph_hook_fn=graph_rewriter_fn,
- write_inference_graph=write_inference_graph)
+ write_inference_graph=write_inference_graph,
+ use_side_inputs=use_side_inputs,
+ side_input_shapes=side_input_shapes,
+ side_input_names=side_input_names,
+ side_input_types=side_input_types)
pipeline_config.eval_config.use_moving_averages = False
config_util.save_pipeline_config(pipeline_config, output_directory)
diff --git a/research/object_detection/exporter_lib_tf2_test.py b/research/object_detection/exporter_lib_tf2_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..99cbf263bece871d1a7d3b5a9f92e22c3f356412
--- /dev/null
+++ b/research/object_detection/exporter_lib_tf2_test.py
@@ -0,0 +1,240 @@
+# Lint as: python2, python3
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+"""Test for exporter_lib_v2.py."""
+
+from __future__ import division
+import io
+import os
+import unittest
+from absl.testing import parameterized
+import numpy as np
+from PIL import Image
+import six
+
+import tensorflow.compat.v2 as tf
+
+from object_detection import exporter_lib_v2
+from object_detection.builders import model_builder
+from object_detection.core import model
+from object_detection.core import standard_fields as fields
+from object_detection.protos import pipeline_pb2
+from object_detection.utils import dataset_util
+from object_detection.utils import tf_version
+
+if six.PY2:
+ import mock # pylint: disable=g-importing-member,g-import-not-at-top
+else:
+ from unittest import mock # pylint: disable=g-importing-member,g-import-not-at-top
+
+
+class FakeModel(model.DetectionModel):
+
+ def __init__(self, conv_weight_scalar=1.0):
+ super(FakeModel, self).__init__(num_classes=2)
+ self._conv = tf.keras.layers.Conv2D(
+ filters=1, kernel_size=1, strides=(1, 1), padding='valid',
+ kernel_initializer=tf.keras.initializers.Constant(
+ value=conv_weight_scalar))
+
+ def preprocess(self, inputs):
+ true_image_shapes = [] # Doesn't matter for the fake model.
+ return tf.identity(inputs), true_image_shapes
+
+ def predict(self, preprocessed_inputs, true_image_shapes):
+ return {'image': self._conv(preprocessed_inputs)}
+
+ def postprocess(self, prediction_dict, true_image_shapes):
+ predict_tensor_sum = tf.reduce_sum(prediction_dict['image'])
+ with tf.control_dependencies(list(prediction_dict.values())):
+ postprocessed_tensors = {
+ 'detection_boxes': tf.constant([[[0.0, 0.0, 0.5, 0.5],
+ [0.5, 0.5, 0.8, 0.8]],
+ [[0.5, 0.5, 1.0, 1.0],
+ [0.0, 0.0, 0.0, 0.0]]], tf.float32),
+ 'detection_scores': predict_tensor_sum + tf.constant(
+ [[0.7, 0.6], [0.9, 0.0]], tf.float32),
+ 'detection_classes': tf.constant([[0, 1],
+ [1, 0]], tf.float32),
+ 'num_detections': tf.constant([2, 1], tf.float32),
+ }
+ return postprocessed_tensors
+
+ def restore_map(self, checkpoint_path, fine_tune_checkpoint_type):
+ pass
+
+ def restore_from_objects(self, fine_tune_checkpoint_type):
+ pass
+
+ def loss(self, prediction_dict, true_image_shapes):
+ pass
+
+ def regularization_losses(self):
+ pass
+
+ def updates(self):
+ pass
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class ExportInferenceGraphTest(tf.test.TestCase, parameterized.TestCase):
+
+ def _save_checkpoint_from_mock_model(
+ self, checkpoint_dir, conv_weight_scalar=6.0):
+ mock_model = FakeModel(conv_weight_scalar)
+ fake_image = tf.zeros(shape=[1, 10, 10, 3], dtype=tf.float32)
+ preprocessed_inputs, true_image_shapes = mock_model.preprocess(fake_image)
+ predictions = mock_model.predict(preprocessed_inputs, true_image_shapes)
+ mock_model.postprocess(predictions, true_image_shapes)
+
+ ckpt = tf.train.Checkpoint(model=mock_model)
+ exported_checkpoint_manager = tf.train.CheckpointManager(
+ ckpt, checkpoint_dir, max_to_keep=1)
+ exported_checkpoint_manager.save(checkpoint_number=0)
+
+ @parameterized.parameters(
+ {'input_type': 'image_tensor'},
+ {'input_type': 'encoded_image_string_tensor'},
+ {'input_type': 'tf_example'},
+ )
+ def test_export_yields_correct_directory_structure(
+ self, input_type='image_tensor'):
+ tmp_dir = self.get_temp_dir()
+ self._save_checkpoint_from_mock_model(tmp_dir)
+ with mock.patch.object(
+ model_builder, 'build', autospec=True) as mock_builder:
+ mock_builder.return_value = FakeModel()
+ output_directory = os.path.join(tmp_dir, 'output')
+ pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
+ exporter_lib_v2.export_inference_graph(
+ input_type=input_type,
+ pipeline_config=pipeline_config,
+ trained_checkpoint_dir=tmp_dir,
+ output_directory=output_directory)
+ self.assertTrue(os.path.exists(os.path.join(
+ output_directory, 'saved_model', 'saved_model.pb')))
+ self.assertTrue(os.path.exists(os.path.join(
+ output_directory, 'saved_model', 'variables', 'variables.index')))
+ self.assertTrue(os.path.exists(os.path.join(
+ output_directory, 'saved_model', 'variables',
+ 'variables.data-00000-of-00001')))
+ self.assertTrue(os.path.exists(os.path.join(
+ output_directory, 'checkpoint', 'ckpt-0.index')))
+ self.assertTrue(os.path.exists(os.path.join(
+ output_directory, 'checkpoint', 'ckpt-0.data-00000-of-00001')))
+ self.assertTrue(os.path.exists(os.path.join(
+ output_directory, 'pipeline.config')))
+
+ def get_dummy_input(self, input_type):
+ """Get dummy input for the given input type."""
+
+ if input_type == 'image_tensor':
+ return np.zeros(shape=(1, 20, 20, 3), dtype=np.uint8)
+ if input_type == 'float_image_tensor':
+ return np.zeros(shape=(1, 20, 20, 3), dtype=np.float32)
+ elif input_type == 'encoded_image_string_tensor':
+ image = Image.new('RGB', (20, 20))
+ byte_io = io.BytesIO()
+ image.save(byte_io, 'PNG')
+ return [byte_io.getvalue()]
+ elif input_type == 'tf_example':
+ image_tensor = tf.zeros((20, 20, 3), dtype=tf.uint8)
+ encoded_jpeg = tf.image.encode_jpeg(tf.constant(image_tensor)).numpy()
+ example = tf.train.Example(
+ features=tf.train.Features(
+ feature={
+ 'image/encoded':
+ dataset_util.bytes_feature(encoded_jpeg),
+ 'image/format':
+ dataset_util.bytes_feature(six.b('jpeg')),
+ 'image/source_id':
+ dataset_util.bytes_feature(six.b('image_id')),
+ })).SerializeToString()
+ return [example]
+
+ @parameterized.parameters(
+ {'input_type': 'image_tensor'},
+ {'input_type': 'encoded_image_string_tensor'},
+ {'input_type': 'tf_example'},
+ {'input_type': 'float_image_tensor'},
+ )
+ def test_export_saved_model_and_run_inference(
+ self, input_type='image_tensor'):
+ tmp_dir = self.get_temp_dir()
+ self._save_checkpoint_from_mock_model(tmp_dir)
+ with mock.patch.object(
+ model_builder, 'build', autospec=True) as mock_builder:
+ mock_builder.return_value = FakeModel()
+ output_directory = os.path.join(tmp_dir, 'output')
+ pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
+ exporter_lib_v2.export_inference_graph(
+ input_type=input_type,
+ pipeline_config=pipeline_config,
+ trained_checkpoint_dir=tmp_dir,
+ output_directory=output_directory)
+
+ saved_model_path = os.path.join(output_directory, 'saved_model')
+ detect_fn = tf.saved_model.load(saved_model_path)
+ image = self.get_dummy_input(input_type)
+ detections = detect_fn(image)
+
+ detection_fields = fields.DetectionResultFields
+ self.assertAllClose(detections[detection_fields.detection_boxes],
+ [[[0.0, 0.0, 0.5, 0.5],
+ [0.5, 0.5, 0.8, 0.8]],
+ [[0.5, 0.5, 1.0, 1.0],
+ [0.0, 0.0, 0.0, 0.0]]])
+ self.assertAllClose(detections[detection_fields.detection_scores],
+ [[0.7, 0.6], [0.9, 0.0]])
+ self.assertAllClose(detections[detection_fields.detection_classes],
+ [[1, 2], [2, 1]])
+ self.assertAllClose(detections[detection_fields.num_detections], [2, 1])
+
+ def test_export_checkpoint_and_run_inference_with_image(self):
+ tmp_dir = self.get_temp_dir()
+ self._save_checkpoint_from_mock_model(tmp_dir, conv_weight_scalar=2.0)
+ with mock.patch.object(
+ model_builder, 'build', autospec=True) as mock_builder:
+ mock_builder.return_value = FakeModel()
+ output_directory = os.path.join(tmp_dir, 'output')
+ pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
+ exporter_lib_v2.export_inference_graph(
+ input_type='image_tensor',
+ pipeline_config=pipeline_config,
+ trained_checkpoint_dir=tmp_dir,
+ output_directory=output_directory)
+
+ mock_model = FakeModel()
+ ckpt = tf.compat.v2.train.Checkpoint(
+ model=mock_model)
+ checkpoint_dir = os.path.join(tmp_dir, 'output', 'checkpoint')
+ manager = tf.compat.v2.train.CheckpointManager(
+ ckpt, checkpoint_dir, max_to_keep=7)
+ ckpt.restore(manager.latest_checkpoint).expect_partial()
+
+ fake_image = tf.ones(shape=[1, 5, 5, 3], dtype=tf.float32)
+ preprocessed_inputs, true_image_shapes = mock_model.preprocess(fake_image)
+ predictions = mock_model.predict(preprocessed_inputs, true_image_shapes)
+ detections = mock_model.postprocess(predictions, true_image_shapes)
+
+ # 150 = conv_weight_scalar * height * width * channels = 2 * 5 * 5 * 3.
+ self.assertAllClose(detections['detection_scores'],
+ [[150 + 0.7, 150 + 0.6], [150 + 0.9, 150 + 0.0]])
+
+
+if __name__ == '__main__':
+ tf.enable_v2_behavior()
+ tf.test.main()
diff --git a/research/object_detection/exporter_lib_v2.py b/research/object_detection/exporter_lib_v2.py
new file mode 100644
index 0000000000000000000000000000000000000000..a7ecb45adb14f1b20c2291a3cf67376ad07194eb
--- /dev/null
+++ b/research/object_detection/exporter_lib_v2.py
@@ -0,0 +1,182 @@
+# Lint as: python2, python3
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+"""Functions to export object detection inference graph."""
+import os
+import tensorflow.compat.v2 as tf
+from object_detection.builders import model_builder
+from object_detection.core import standard_fields as fields
+from object_detection.data_decoders import tf_example_decoder
+from object_detection.utils import config_util
+
+
+def _decode_image(encoded_image_string_tensor):
+ image_tensor = tf.image.decode_image(encoded_image_string_tensor,
+ channels=3)
+ image_tensor.set_shape((None, None, 3))
+ return image_tensor
+
+
+def _decode_tf_example(tf_example_string_tensor):
+ tensor_dict = tf_example_decoder.TfExampleDecoder().decode(
+ tf_example_string_tensor)
+ image_tensor = tensor_dict[fields.InputDataFields.image]
+ return image_tensor
+
+
+class DetectionInferenceModule(tf.Module):
+ """Detection Inference Module."""
+
+ def __init__(self, detection_model):
+ """Initializes a module for detection.
+
+ Args:
+ detection_model: The detection model to use for inference.
+ """
+ self._model = detection_model
+
+ def _run_inference_on_images(self, image):
+ """Cast image to float and run inference.
+
+ Args:
+ image: uint8 Tensor of shape [1, None, None, 3]
+ Returns:
+ Tensor dictionary holding detections.
+ """
+ label_id_offset = 1
+
+ image = tf.cast(image, tf.float32)
+ image, shapes = self._model.preprocess(image)
+ prediction_dict = self._model.predict(image, shapes)
+ detections = self._model.postprocess(prediction_dict, shapes)
+ classes_field = fields.DetectionResultFields.detection_classes
+ detections[classes_field] = (
+ tf.cast(detections[classes_field], tf.float32) + label_id_offset)
+
+ for key, val in detections.items():
+ detections[key] = tf.cast(val, tf.float32)
+
+ return detections
+
+
+class DetectionFromImageModule(DetectionInferenceModule):
+ """Detection Inference Module for image inputs."""
+
+ @tf.function(
+ input_signature=[
+ tf.TensorSpec(shape=[1, None, None, 3], dtype=tf.uint8)])
+ def __call__(self, input_tensor):
+ return self._run_inference_on_images(input_tensor)
+
+
+class DetectionFromFloatImageModule(DetectionInferenceModule):
+ """Detection Inference Module for float image inputs."""
+
+ @tf.function(
+ input_signature=[
+ tf.TensorSpec(shape=[1, None, None, 3], dtype=tf.float32)])
+ def __call__(self, input_tensor):
+ return self._run_inference_on_images(input_tensor)
+
+
+class DetectionFromEncodedImageModule(DetectionInferenceModule):
+ """Detection Inference Module for encoded image string inputs."""
+
+ @tf.function(input_signature=[tf.TensorSpec(shape=[1], dtype=tf.string)])
+ def __call__(self, input_tensor):
+ with tf.device('cpu:0'):
+ image = tf.map_fn(
+ _decode_image,
+ elems=input_tensor,
+ dtype=tf.uint8,
+ parallel_iterations=32,
+ back_prop=False)
+ return self._run_inference_on_images(image)
+
+
+class DetectionFromTFExampleModule(DetectionInferenceModule):
+ """Detection Inference Module for TF.Example inputs."""
+
+ @tf.function(input_signature=[tf.TensorSpec(shape=[1], dtype=tf.string)])
+ def __call__(self, input_tensor):
+ with tf.device('cpu:0'):
+ image = tf.map_fn(
+ _decode_tf_example,
+ elems=input_tensor,
+ dtype=tf.uint8,
+ parallel_iterations=32,
+ back_prop=False)
+ return self._run_inference_on_images(image)
+
+DETECTION_MODULE_MAP = {
+ 'image_tensor': DetectionFromImageModule,
+ 'encoded_image_string_tensor':
+ DetectionFromEncodedImageModule,
+ 'tf_example': DetectionFromTFExampleModule,
+ 'float_image_tensor': DetectionFromFloatImageModule
+}
+
+
+def export_inference_graph(input_type,
+ pipeline_config,
+ trained_checkpoint_dir,
+ output_directory):
+ """Exports inference graph for the model specified in the pipeline config.
+
+ This function creates `output_directory` if it does not already exist,
+ which will hold a copy of the pipeline config with filename `pipeline.config`,
+ and two subdirectories named `checkpoint` and `saved_model`
+ (containing the exported checkpoint and SavedModel respectively).
+
+ Args:
+    input_type: Type of input for the graph. Can be one of ['image_tensor',
+      'encoded_image_string_tensor', 'tf_example', 'float_image_tensor'].
+    pipeline_config: pipeline_pb2.TrainEvalPipelineConfig proto.
+ trained_checkpoint_dir: Path to the trained checkpoint file.
+ output_directory: Path to write outputs.
+ Raises:
+ ValueError: if input_type is invalid.
+ """
+ output_checkpoint_directory = os.path.join(output_directory, 'checkpoint')
+ output_saved_model_directory = os.path.join(output_directory, 'saved_model')
+
+ detection_model = model_builder.build(pipeline_config.model,
+ is_training=False)
+
+ ckpt = tf.train.Checkpoint(
+ model=detection_model)
+ manager = tf.train.CheckpointManager(
+ ckpt, trained_checkpoint_dir, max_to_keep=1)
+ status = ckpt.restore(manager.latest_checkpoint).expect_partial()
+
+ if input_type not in DETECTION_MODULE_MAP:
+ raise ValueError('Unrecognized `input_type`')
+ detection_module = DETECTION_MODULE_MAP[input_type](detection_model)
+ # Getting the concrete function traces the graph and forces variables to
+ # be constructed --- only after this can we save the checkpoint and
+ # saved model.
+ concrete_function = detection_module.__call__.get_concrete_function()
+ status.assert_existing_objects_matched()
+
+ exported_checkpoint_manager = tf.train.CheckpointManager(
+ ckpt, output_checkpoint_directory, max_to_keep=1)
+ exported_checkpoint_manager.save(checkpoint_number=0)
+
+ tf.saved_model.save(detection_module,
+ output_saved_model_directory,
+ signatures=concrete_function)
+
+ config_util.save_pipeline_config(pipeline_config, output_directory)
diff --git a/research/object_detection/exporter_main_v2.py b/research/object_detection/exporter_main_v2.py
new file mode 100644
index 0000000000000000000000000000000000000000..a2ba8456039d4584e5998d619f36747d58018418
--- /dev/null
+++ b/research/object_detection/exporter_main_v2.py
@@ -0,0 +1,126 @@
+# Lint as: python2, python3
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+r"""Tool to export an object detection model for inference.
+
+Prepares an object detection tensorflow graph for inference using model
+configuration and a trained checkpoint. Outputs associated checkpoint files,
+a SavedModel, and a copy of the model config.
+
+The inference graph contains one of four input nodes depending on the user
+specified option.
+ * `image_tensor`: Accepts a uint8 4-D tensor of shape [1, None, None, 3]
+ * `float_image_tensor`: Accepts a float32 4-D tensor of shape
+ [1, None, None, 3]
+ * `encoded_image_string_tensor`: Accepts a 1-D string tensor of shape [None]
+ containing encoded PNG or JPEG images. Image resolutions are expected to be
+ the same if more than 1 image is provided.
+ * `tf_example`: Accepts a 1-D string tensor of shape [None] containing
+ serialized TFExample protos. Image resolutions are expected to be the same
+ if more than 1 image is provided.
+
+and the following output nodes returned by the model.postprocess(..):
+ * `num_detections`: Outputs float32 tensors of the form [batch]
+ that specifies the number of valid boxes per image in the batch.
+ * `detection_boxes`: Outputs float32 tensors of the form
+ [batch, num_boxes, 4] containing detected boxes.
+ * `detection_scores`: Outputs float32 tensors of the form
+ [batch, num_boxes] containing class scores for the detections.
+ * `detection_classes`: Outputs float32 tensors of the form
+ [batch, num_boxes] containing classes for the detections.
+
+
+Example Usage:
+--------------
+python exporter_main_v2.py \
+ --input_type image_tensor \
+ --pipeline_config_path path/to/ssd_inception_v2.config \
+ --trained_checkpoint_dir path/to/checkpoint \
+ --output_directory path/to/exported_model_directory
+
+The expected output would be in the directory
+path/to/exported_model_directory (which is created if it does not exist)
+holding two subdirectories (corresponding to checkpoint and SavedModel,
+respectively) and a copy of the pipeline config.
+
+Config overrides (see the `config_override` flag) are text protobufs
+(also of type pipeline_pb2.TrainEvalPipelineConfig) which are used to override
+certain fields in the provided pipeline_config_path. These are useful for
+making small changes to the inference graph that differ from the training or
+eval config.
+
+Example Usage (in which we change the second stage post-processing score
+threshold to be 0.5):
+
+python exporter_main_v2.py \
+ --input_type image_tensor \
+ --pipeline_config_path path/to/ssd_inception_v2.config \
+ --trained_checkpoint_dir path/to/checkpoint \
+ --output_directory path/to/exported_model_directory \
+ --config_override " \
+ model{ \
+ faster_rcnn { \
+ second_stage_post_processing { \
+ batch_non_max_suppression { \
+ score_threshold: 0.5 \
+ } \
+ } \
+ } \
+ }"
+"""
+from absl import app
+from absl import flags
+
+import tensorflow.compat.v2 as tf
+from google.protobuf import text_format
+from object_detection import exporter_lib_v2
+from object_detection.protos import pipeline_pb2
+
+tf.enable_v2_behavior()
+
+
+FLAGS = flags.FLAGS
+
+flags.DEFINE_string('input_type', 'image_tensor', 'Type of input node. Can be '
+ 'one of [`image_tensor`, `encoded_image_string_tensor`, '
+ '`tf_example`, `float_image_tensor`]')
+flags.DEFINE_string('pipeline_config_path', None,
+ 'Path to a pipeline_pb2.TrainEvalPipelineConfig config '
+ 'file.')
+flags.DEFINE_string('trained_checkpoint_dir', None,
+ 'Path to trained checkpoint directory')
+flags.DEFINE_string('output_directory', None, 'Path to write outputs.')
+flags.DEFINE_string('config_override', '',
+ 'pipeline_pb2.TrainEvalPipelineConfig '
+ 'text proto to override pipeline_config_path.')
+
+flags.mark_flag_as_required('pipeline_config_path')
+flags.mark_flag_as_required('trained_checkpoint_dir')
+flags.mark_flag_as_required('output_directory')
+
+
+def main(_):
+ pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
+ with tf.io.gfile.GFile(FLAGS.pipeline_config_path, 'r') as f:
+ text_format.Merge(f.read(), pipeline_config)
+ text_format.Merge(FLAGS.config_override, pipeline_config)
+ exporter_lib_v2.export_inference_graph(
+ FLAGS.input_type, pipeline_config, FLAGS.trained_checkpoint_dir,
+ FLAGS.output_directory)
+
+
+if __name__ == '__main__':
+ app.run(main)
diff --git a/research/object_detection/exporter_test.py b/research/object_detection/exporter_tf1_test.py
similarity index 99%
rename from research/object_detection/exporter_test.py
rename to research/object_detection/exporter_tf1_test.py
index babe41d1e2a76b513f1a00bfefd5c33a8a07f690..b33bafd8db4f77627d6a64a1035f8c08bf6c09ee 100644
--- a/research/object_detection/exporter_test.py
+++ b/research/object_detection/exporter_tf1_test.py
@@ -19,6 +19,7 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
+import unittest
import numpy as np
import six
import tensorflow.compat.v1 as tf
@@ -33,12 +34,13 @@ from object_detection.core import model
from object_detection.protos import graph_rewriter_pb2
from object_detection.protos import pipeline_pb2
from object_detection.utils import ops
+from object_detection.utils import tf_version
from object_detection.utils import variables_helper
if six.PY2:
import mock # pylint: disable=g-import-not-at-top
else:
- from unittest import mock # pylint: disable=g-import-not-at-top
+ mock = unittest.mock # pylint: disable=g-import-not-at-top, g-importing-member
# pylint: disable=g-import-not-at-top
try:
@@ -103,6 +105,9 @@ class FakeModel(model.DetectionModel):
def restore_map(self, checkpoint_path, fine_tune_checkpoint_type):
pass
+ def restore_from_objects(self, fine_tune_checkpoint_type):
+ pass
+
def loss(self, prediction_dict, true_image_shapes):
pass
@@ -113,6 +118,7 @@ class FakeModel(model.DetectionModel):
pass
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class ExportInferenceGraphTest(tf.test.TestCase):
def _save_checkpoint_from_mock_model(self,
diff --git a/research/object_detection/g3doc/context_rcnn.md b/research/object_detection/g3doc/context_rcnn.md
new file mode 100644
index 0000000000000000000000000000000000000000..8d132b15b28ecc2d33581c9087804bb7d87000cf
--- /dev/null
+++ b/research/object_detection/g3doc/context_rcnn.md
@@ -0,0 +1,199 @@
+# Context R-CNN
+
+Context R-CNN is an object detection model that uses contextual features to
+improve object detection. See https://arxiv.org/abs/1912.03538 for more details.
+
+## Table of Contents
+
+* [Preparing Context Data for Context R-CNN](#preparing-context-data-for-context-r-cnn)
+ + [Generating TfRecords from a set of images and a COCO-CameraTraps style
+ JSON](#generating-tfrecords-from-a-set-of-images-and-a-coco-cameratraps-style-json)
+ + [Generating weakly-supervised bounding box labels for image-labeled data](#generating-weakly-supervised-bounding-box-labels-for-image-labeled-data)
+ + [Generating and saving contextual features for each image](#generating-and-saving-contextual-features-for-each-image)
+ + [Building up contextual memory banks and storing them for each context
+ group](#building-up-contextual-memory-banks-and-storing-them-for-each-context-group)
+* [Training a Context R-CNN Model](#training-a-context-r-cnn-model)
+* [Exporting a Context R-CNN Model](#exporting-a-context-r-cnn-model)
+
+## Preparing Context Data for Context R-CNN
+
+In this section, we will walk through the process of generating TfRecords with
+contextual features. We focus on building context from object-centric features
+generated with a pre-trained Faster R-CNN model, but you can adapt the provided
+code to use alternative feature extractors.
+
+Each of these data processing scripts uses Apache Beam, which can be installed
+using
+
+```
+pip install apache-beam
+```
+
+and can be run locally, or on a cluster for efficient processing of large
+amounts of data. Note that generate_detection_data.py and
+generate_embedding_data.py both involve running inference, and may be very slow
+to run locally. See the
+[Apache Beam documentation](https://beam.apache.org/documentation/runners/dataflow/)
+for more information, and Google Cloud Documentation for a tutorial on
+[running Beam jobs on DataFlow](https://cloud.google.com/dataflow/docs/quickstarts/quickstart-python).
+
+### Generating TfRecords from a set of images and a COCO-CameraTraps style JSON
+
+If your data is already stored in TfRecords, you can skip this first step.
+
+We assume a COCO-CameraTraps json format, as described on
+[LILA.science](https://github.com/microsoft/CameraTraps/blob/master/data_management/README.md).
+
+COCO-CameraTraps is a format that adds static-camera-specific fields, such as a
+location ID and datetime, to the well-established COCO format. To generate
+appropriate context later on, be sure you have specified each contextual group
+with a different location ID, which in the static camera case would be the ID of
+the camera, as well as the datetime each photo was taken. We assume that empty
+images will be labeled 'empty' with class id 0.
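+
+A minimal sketch of the expected JSON fields (values here are hypothetical;
+see the LILA.science link above for the authoritative specification):
+
+```
+{
+  "images": [{"id": "img_0001", "file_name": "img_0001.jpg",
+              "location": "camera_A", "datetime": "2019-06-01 08:30:00"}],
+  "annotations": [{"id": "ann_0001", "image_id": "img_0001",
+                   "category_id": 1}],
+  "categories": [{"id": 0, "name": "empty"}, {"id": 1, "name": "animal"}]
+}
+```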
+
+To generate TfRecords from your database and local image folder, run
+
+```
+python object_detection/dataset_tools/context_rcnn/create_cococameratraps_tfexample_main.py \
+ --alsologtostderr \
+ --output_tfrecord_prefix="/path/to/output/tfrecord/location/prefix" \
+ --image_directory="/path/to/image/folder/" \
+ --input_annotations_file="path/to/annotations.json"
+```
+
+### Generating weakly-supervised bounding box labels for image-labeled data
+
+If all your data already has bounding box labels, you can skip this step.
+
+Many camera trap datasets do not have bounding box labels, or only have bounding
+box labels for some of the data. We have provided code to add bounding boxes
+from a pretrained model (such as the
+[Microsoft AI for Earth MegaDetector](https://github.com/microsoft/CameraTraps/blob/master/megadetector.md))
+and match the boxes to the image-level class label.
+
+To export your pretrained detection model, run
+
+```
+python object_detection/export_inference_graph.py \
+ --alsologtostderr \
+ --input_type tf_example \
+ --pipeline_config_path path/to/faster_rcnn_model.config \
+ --trained_checkpoint_prefix path/to/model.ckpt \
+ --output_directory path/to/exported_model_directory
+```
+
+To add bounding boxes to your dataset using the above model, run
+
+```
+python object_detection/dataset_tools/context_rcnn/generate_detection_data.py \
+ --alsologtostderr \
+ --input_tfrecord path/to/input_tfrecord@X \
+ --output_tfrecord path/to/output_tfrecord@X \
+ --model_dir path/to/exported_model_directory/saved_model
+```
+
+If an image already has bounding box labels, those labels are left unchanged. If
+an image is labeled 'empty' (class ID 0), we will not generate boxes for that
+image.
+
+### Generating and saving contextual features for each image
+
+We next extract and store features for each image from a pretrained model. This
+model can be the same model as above, or be a class-specific detection model
+trained on data from your classes of interest.
+
+To export your pretrained detection model, run
+
+```
+python object_detection/export_inference_graph.py \
+ --alsologtostderr \
+ --input_type tf_example \
+ --pipeline_config_path path/to/pipeline.config \
+ --trained_checkpoint_prefix path/to/model.ckpt \
+ --output_directory path/to/exported_model_directory \
+ --additional_output_tensor_names detection_features
+```
+
+Make sure that you have set `output_final_box_features: true` within
+your config file before exporting. This is needed to export the features as an
+output, but it does not need to be set during training.
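+
+For reference, a sketch of where this field sits in the config (assuming a
+Faster R-CNN based pipeline; exact placement may differ for other
+architectures):
+
+```
+model {
+  faster_rcnn {
+    # ... other faster_rcnn fields ...
+    output_final_box_features: true
+  }
+}
+```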
+
+To generate and save contextual features for your data, run
+
+```
+python object_detection/dataset_tools/context_rcnn/generate_embedding_data.py \
+ --alsologtostderr \
+ --embedding_input_tfrecord path/to/input_tfrecords* \
+ --embedding_output_tfrecord path/to/output_tfrecords \
+ --embedding_model_dir path/to/exported_model_directory/saved_model
+```
+
+### Building up contextual memory banks and storing them for each context group
+
+To build the context features you just added for each image into memory banks,
+run
+
+```
+python object_detection/dataset_tools/context_rcnn/add_context_to_examples.py \
+ --input_tfrecord path/to/input_tfrecords* \
+ --output_tfrecord path/to/output_tfrecords \
+ --sequence_key image/location \
+ --time_horizon month
+```
+
+where the input_tfrecords for add_context_to_examples.py are the
+output_tfrecords from generate_embedding_data.py.
+
+For all options, see add_context_to_examples.py. By default, this code builds
+TfSequenceExamples, which are more data efficient (this allows you to store the
+context features once for each context group, as opposed to once per image). If
+you would like to export TfExamples instead, set flag `--output_type
+tf_example`.
+
+If you use TfSequenceExamples, you must be sure to set `input_type:
+TF_SEQUENCE_EXAMPLE` within your Context R-CNN configs for both
+train_input_reader and test_input_reader. See
+`object_detection/test_data/context_rcnn_camera_trap.config`
+for an example.
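+
+A sketch of the relevant input reader stanza (other fields such as
+`tf_record_input_reader` paths and `label_map_path` are omitted; see the
+referenced config for a complete example):
+
+```
+train_input_reader: {
+  input_type: TF_SEQUENCE_EXAMPLE
+  # tf_record_input_reader, label_map_path, etc.
+}
+```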
+
+## Training a Context R-CNN Model
+
+To train a Context R-CNN model, you must first set up your config file. See
+`test_data/context_rcnn_camera_trap.config` for an example. The important
+difference between this config and a Faster R-CNN config is the inclusion of a
+`context_config` within the model, which defines the necessary Context R-CNN
+parameters.
+
+```
+context_config {
+ max_num_context_features: 2000
+ context_feature_length: 2057
+}
+```
+
+Once your config file has been updated with your local paths, you can follow
+along with documentation for running [locally](running_locally.md), or
+[on the cloud](running_on_cloud.md).
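+
+For example, one way to launch local training with the TF1 binary is sketched
+below (paths are placeholders; see the running-locally documentation for the
+full set of flags):
+
+```
+python object_detection/model_main.py \
+  --pipeline_config_path path/to/context_rcnn_camera_trap.config \
+  --model_dir path/to/model_dir \
+  --alsologtostderr
+```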
+
+## Exporting a Context R-CNN Model
+
+Since Context R-CNN takes context features as well as images as input, we have
+to explicitly define the other inputs ("side_inputs") to the model when
+exporting, as below. This example is shown with default context feature shapes.
+
+```
+python export_inference_graph.py \
+ --input_type image_tensor \
+ --input_shape 1,-1,-1,3 \
+ --pipeline_config_path /path/to/context_rcnn_model/pipeline.config \
+ --trained_checkpoint_prefix /path/to/context_rcnn_model/model.ckpt \
+ --output_directory /path/to/output_directory \
+ --use_side_inputs True \
+ --side_input_shapes 1,2000,2057/1 \
+ --side_input_names context_features,valid_context_size \
+ --side_input_types float,int
+
+```
+
+If you have questions about Context R-CNN, please contact
+[Sara Beery](https://beerys.github.io/).
diff --git a/research/object_detection/g3doc/detection_model_zoo.md b/research/object_detection/g3doc/detection_model_zoo.md
index b13fe6a3123ab1336afe0b7e8660753893040cb7..cb515b813ba8296005da503703bb659b9cb8b9dd 100644
--- a/research/object_detection/g3doc/detection_model_zoo.md
+++ b/research/object_detection/g3doc/detection_model_zoo.md
@@ -1,32 +1,34 @@
# Tensorflow detection model zoo
-We provide a collection of detection models pre-trained on the [COCO
-dataset](http://cocodataset.org), the [Kitti dataset](http://www.cvlibs.net/datasets/kitti/),
-the
+We provide a collection of detection models pre-trained on the
+[COCO dataset](http://cocodataset.org), the
+[Kitti dataset](http://www.cvlibs.net/datasets/kitti/), the
[Open Images dataset](https://storage.googleapis.com/openimages/web/index.html),
-the [AVA v2.1 dataset](https://research.google.com/ava/) and the
-[iNaturalist Species Detection Dataset](https://github.com/visipedia/inat_comp/blob/master/2017/README.md#bounding-boxes).
+the [AVA v2.1 dataset](https://research.google.com/ava/), the
+[iNaturalist Species Detection Dataset](https://github.com/visipedia/inat_comp/blob/master/2017/README.md#bounding-boxes),
+and the
+[Snapshot Serengeti Dataset](http://lila.science/datasets/snapshot-serengeti).
These models can be useful for out-of-the-box inference if you are interested in
categories already in those datasets. They are also useful for initializing your
models when training on novel datasets.
In the table below, we list each such pre-trained model including:
-* a model name that corresponds to a config file that was used to train this
- model in the `samples/configs` directory,
-* a download link to a tar.gz file containing the pre-trained model,
-* model speed --- we report running time in ms per 600x600 image (including all
- pre and post-processing), but please be
- aware that these timings depend highly on one's specific hardware
- configuration (these timings were performed using an Nvidia
- GeForce GTX TITAN X card) and should be treated more as relative timings in
- many cases. Also note that desktop GPU timing does not always reflect mobile
- run time. For example Mobilenet V2 is faster on mobile devices than Mobilenet
- V1, but is slightly slower on desktop GPU.
-* detector performance on subset of the COCO validation set or Open Images test split as measured by the dataset-specific mAP measure.
- Here, higher is better, and we only report bounding box mAP rounded to the
- nearest integer.
-* Output types (`Boxes`, and `Masks` if applicable )
+* a model name that corresponds to a config file that was used to train this
+ model in the `samples/configs` directory,
+* a download link to a tar.gz file containing the pre-trained model,
+* model speed --- we report running time in ms per 600x600 image (including
+  all pre- and post-processing), but please be aware that these timings depend
+  highly on one's specific hardware configuration (these timings were
+  performed using an Nvidia GeForce GTX TITAN X card) and should be treated
+  more as relative timings in many cases. Also note that desktop GPU timing
+  does not always reflect mobile run time. For example, Mobilenet V2 is faster
+  on mobile devices than Mobilenet V1, but is slightly slower on a desktop
+  GPU.
+* detector performance on a subset of the COCO validation set, Open Images
+  test split, iNaturalist test split, or Snapshot Serengeti LILA.science test
+  split, as measured by the dataset-specific mAP measure. Here, higher is
+  better, and we only report bounding box mAP rounded to the nearest integer.
+* Output types (`Boxes`, and `Masks` if applicable)
You can un-tar each tar.gz file via, e.g.,:
@@ -53,118 +55,133 @@ Inside the un-tar'ed directory, you will find:
Some remarks on frozen inference graphs:
-* If you try to evaluate the frozen graph, you may find performance numbers for
- some of the models to be slightly lower than what we report in the below
- tables. This is because we discard detections with scores below a
- threshold (typically 0.3) when creating the frozen graph. This corresponds
- effectively to picking a point on the precision recall curve of
- a detector (and discarding the part past that point), which negatively impacts
- standard mAP metrics.
-* Our frozen inference graphs are generated using the
- [v1.12.0](https://github.com/tensorflow/tensorflow/tree/v1.12.0)
- release version of Tensorflow and we do not guarantee that these will work
- with other versions; this being said, each frozen inference graph can be
- regenerated using your current version of Tensorflow by re-running the
- [exporter](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/exporting_models.md),
- pointing it at the model directory as well as the corresponding config file in
- [samples/configs](https://github.com/tensorflow/models/tree/master/research/object_detection/samples/configs).
-
+* If you try to evaluate the frozen graph, you may find performance numbers
+  for some of the models to be slightly lower than what we report in the
+  tables below. This is because we discard detections with scores below a
+  threshold (typically 0.3) when creating the frozen graph. This corresponds
+  effectively to picking a point on the precision-recall curve of a detector
+  (and discarding the part past that point), which negatively impacts standard
+  mAP metrics.
+* Our frozen inference graphs are generated using the
+ [v1.12.0](https://github.com/tensorflow/tensorflow/tree/v1.12.0) release
+  version of Tensorflow, and we do not guarantee that these will work with
+ other versions; this being said, each frozen inference graph can be
+ regenerated using your current version of Tensorflow by re-running the
+ [exporter](https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/exporting_models.md),
+ pointing it at the model directory as well as the corresponding config file
+ in
+ [samples/configs](https://github.com/tensorflow/models/tree/master/research/object_detection/samples/configs).
## COCO-trained models
-| Model name | Speed (ms) | COCO mAP[^1] | Outputs |
-| ------------ | :--------------: | :--------------: | :-------------: |
-| [ssd_mobilenet_v1_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_coco_2018_01_28.tar.gz) | 30 | 21 | Boxes |
-| [ssd_mobilenet_v1_0.75_depth_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_0.75_depth_300x300_coco14_sync_2018_07_03.tar.gz) | 26 | 18 | Boxes |
-| [ssd_mobilenet_v1_quantized_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_quantized_300x300_coco14_sync_2018_07_18.tar.gz) | 29 | 18 | Boxes |
-| [ssd_mobilenet_v1_0.75_depth_quantized_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_0.75_depth_quantized_300x300_coco14_sync_2018_07_18.tar.gz) | 29 | 16 | Boxes |
-| [ssd_mobilenet_v1_ppn_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_ppn_shared_box_predictor_300x300_coco14_sync_2018_07_03.tar.gz) | 26 | 20 | Boxes |
-| [ssd_mobilenet_v1_fpn_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03.tar.gz) | 56 | 32 | Boxes |
-| [ssd_resnet_50_fpn_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_resnet50_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03.tar.gz) | 76 | 35 | Boxes |
-| [ssd_mobilenet_v2_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_coco_2018_03_29.tar.gz) | 31 | 22 | Boxes |
-| [ssd_mobilenet_v2_quantized_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_quantized_300x300_coco_2019_01_03.tar.gz) | 29 | 22 | Boxes |
-| [ssdlite_mobilenet_v2_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobilenet_v2_coco_2018_05_09.tar.gz) | 27 | 22 | Boxes |
-| [ssd_inception_v2_coco](http://download.tensorflow.org/models/object_detection/ssd_inception_v2_coco_2018_01_28.tar.gz) | 42 | 24 | Boxes |
-| [faster_rcnn_inception_v2_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_v2_coco_2018_01_28.tar.gz) | 58 | 28 | Boxes |
-| [faster_rcnn_resnet50_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet50_coco_2018_01_28.tar.gz) | 89 | 30 | Boxes |
-| [faster_rcnn_resnet50_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet50_lowproposals_coco_2018_01_28.tar.gz) | 64 | | Boxes |
-| [rfcn_resnet101_coco](http://download.tensorflow.org/models/object_detection/rfcn_resnet101_coco_2018_01_28.tar.gz) | 92 | 30 | Boxes |
-| [faster_rcnn_resnet101_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_coco_2018_01_28.tar.gz) | 106 | 32 | Boxes |
-| [faster_rcnn_resnet101_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_lowproposals_coco_2018_01_28.tar.gz) | 82 | | Boxes |
-| [faster_rcnn_inception_resnet_v2_atrous_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_coco_2018_01_28.tar.gz) | 620 | 37 | Boxes |
-| [faster_rcnn_inception_resnet_v2_atrous_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_lowproposals_coco_2018_01_28.tar.gz) | 241 | | Boxes |
-| [faster_rcnn_nas](http://download.tensorflow.org/models/object_detection/faster_rcnn_nas_coco_2018_01_28.tar.gz) | 1833 | 43 | Boxes |
-| [faster_rcnn_nas_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_nas_lowproposals_coco_2018_01_28.tar.gz) | 540 | | Boxes |
-| [mask_rcnn_inception_resnet_v2_atrous_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_inception_resnet_v2_atrous_coco_2018_01_28.tar.gz) | 771 | 36 | Masks |
-| [mask_rcnn_inception_v2_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_inception_v2_coco_2018_01_28.tar.gz) | 79 | 25 | Masks |
-| [mask_rcnn_resnet101_atrous_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_resnet101_atrous_coco_2018_01_28.tar.gz) | 470 | 33 | Masks |
-| [mask_rcnn_resnet50_atrous_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_resnet50_atrous_coco_2018_01_28.tar.gz) | 343 | 29 | Masks |
-
-Note: The asterisk (☆) at the end of model name indicates that this model supports TPU training.
-
-Note: If you download the tar.gz file of quantized models and un-tar, you will get different set of files - a checkpoint, a config file and tflite frozen graphs (txt/binary).
-
+Model name | Speed (ms) | COCO mAP[^1] | Outputs
+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :--------: | :----------: | :-----:
+[ssd_mobilenet_v1_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_coco_2018_01_28.tar.gz) | 30 | 21 | Boxes
+[ssd_mobilenet_v1_0.75_depth_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_0.75_depth_300x300_coco14_sync_2018_07_03.tar.gz) | 26 | 18 | Boxes
+[ssd_mobilenet_v1_quantized_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_quantized_300x300_coco14_sync_2018_07_18.tar.gz) | 29 | 18 | Boxes
+[ssd_mobilenet_v1_0.75_depth_quantized_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_0.75_depth_quantized_300x300_coco14_sync_2018_07_18.tar.gz) | 29 | 16 | Boxes
+[ssd_mobilenet_v1_ppn_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_ppn_shared_box_predictor_300x300_coco14_sync_2018_07_03.tar.gz) | 26 | 20 | Boxes
+[ssd_mobilenet_v1_fpn_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03.tar.gz) | 56 | 32 | Boxes
+[ssd_resnet_50_fpn_coco ☆](http://download.tensorflow.org/models/object_detection/ssd_resnet50_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03.tar.gz) | 76 | 35 | Boxes
+[ssd_mobilenet_v2_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_coco_2018_03_29.tar.gz) | 31 | 22 | Boxes
+[ssd_mobilenet_v2_quantized_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_quantized_300x300_coco_2019_01_03.tar.gz) | 29 | 22 | Boxes
+[ssdlite_mobilenet_v2_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobilenet_v2_coco_2018_05_09.tar.gz) | 27 | 22 | Boxes
+[ssd_inception_v2_coco](http://download.tensorflow.org/models/object_detection/ssd_inception_v2_coco_2018_01_28.tar.gz) | 42 | 24 | Boxes
+[faster_rcnn_inception_v2_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_v2_coco_2018_01_28.tar.gz) | 58 | 28 | Boxes
+[faster_rcnn_resnet50_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet50_coco_2018_01_28.tar.gz) | 89 | 30 | Boxes
+[faster_rcnn_resnet50_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet50_lowproposals_coco_2018_01_28.tar.gz) | 64 | | Boxes
+[rfcn_resnet101_coco](http://download.tensorflow.org/models/object_detection/rfcn_resnet101_coco_2018_01_28.tar.gz) | 92 | 30 | Boxes
+[faster_rcnn_resnet101_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_coco_2018_01_28.tar.gz) | 106 | 32 | Boxes
+[faster_rcnn_resnet101_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_lowproposals_coco_2018_01_28.tar.gz) | 82 | | Boxes
+[faster_rcnn_inception_resnet_v2_atrous_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_coco_2018_01_28.tar.gz) | 620 | 37 | Boxes
+[faster_rcnn_inception_resnet_v2_atrous_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_lowproposals_coco_2018_01_28.tar.gz) | 241 | | Boxes
+[faster_rcnn_nas](http://download.tensorflow.org/models/object_detection/faster_rcnn_nas_coco_2018_01_28.tar.gz) | 1833 | 43 | Boxes
+[faster_rcnn_nas_lowproposals_coco](http://download.tensorflow.org/models/object_detection/faster_rcnn_nas_lowproposals_coco_2018_01_28.tar.gz) | 540 | | Boxes
+[mask_rcnn_inception_resnet_v2_atrous_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_inception_resnet_v2_atrous_coco_2018_01_28.tar.gz) | 771 | 36 | Masks
+[mask_rcnn_inception_v2_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_inception_v2_coco_2018_01_28.tar.gz) | 79 | 25 | Masks
+[mask_rcnn_resnet101_atrous_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_resnet101_atrous_coco_2018_01_28.tar.gz) | 470 | 33 | Masks
+[mask_rcnn_resnet50_atrous_coco](http://download.tensorflow.org/models/object_detection/mask_rcnn_resnet50_atrous_coco_2018_01_28.tar.gz) | 343 | 29 | Masks
+
+Note: The star (☆) at the end of a model name indicates that the model
+supports TPU training.
+
+Note: If you download the tar.gz file of a quantized model and un-tar it, you
+will get a different set of files: a checkpoint, a config file, and TFLite
+frozen graphs (txt/binary).
### Mobile models
Model name | Pixel 1 Latency (ms) | COCO mAP | Outputs
------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :------------------: | :------: | :-----:
-[ssd_mobiledet_cpu_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobiledet_cpu_320x320_coco_2020_05_19.tar.gz) | 113 | 24.0 | Boxes
-[ssd_mobilenet_v2_mnasfpn_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_mnasfpn_shared_box_predictor_320x320_coco_sync_2020_05_18.tar.gz) | 183 | 26.6 | Boxes
-[ssd_mobilenet_v3_large_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v3_large_coco_2020_01_14.tar.gz) | 119 | 22.6 | Boxes
-[ssd_mobilenet_v3_small_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v3_small_coco_2020_01_14.tar.gz) | 43 | 15.4 | Boxes
+[ssd_mobiledet_cpu_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobiledet_cpu_320x320_coco_2020_05_19.tar.gz) | 113 | 24.0 | Boxes
+[ssd_mobilenet_v2_mnasfpn_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_mnasfpn_shared_box_predictor_320x320_coco_sync_2020_05_18.tar.gz) | 183 | 26.6 | Boxes
+[ssd_mobilenet_v3_large_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v3_large_coco_2020_01_14.tar.gz) | 119 | 22.6 | Boxes
+[ssd_mobilenet_v3_small_coco](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v3_small_coco_2020_01_14.tar.gz) | 43 | 15.4 | Boxes
### Pixel4 Edge TPU models
-Model name | Pixel 4 Edge TPU Latency (ms) | COCO mAP (fp32/uint8) | Outputs
------------------------------------------------------------------------------------------------------------------------------------ | :------------------: | :------: | :-----:
-[ssd_mobiledet_edgetpu_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobiledet_edgetpu_320x320_coco_2020_05_19.tar.gz) | 6.9 | 25.9/25.6 | Boxes
-[ssd_mobilenet_edgetpu_coco](https://storage.cloud.google.com/mobilenet_edgetpu/checkpoints/ssdlite_mobilenet_edgetpu_coco_quant.tar.gz) | 6.6 | -/24.3 | Boxes
+
+Model name | Pixel 4 Edge TPU Latency (ms) | COCO mAP (fp32/uint8) | Outputs
+--------------------------------------------------------------------------------------------------------------------------------------------- | :---------------------------: | :-------------------: | :-----:
+[ssd_mobiledet_edgetpu_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobiledet_edgetpu_320x320_coco_2020_05_19.tar.gz) | 6.9 | 25.9/25.6 | Boxes
+[ssd_mobilenet_edgetpu_coco](https://storage.cloud.google.com/mobilenet_edgetpu/checkpoints/ssdlite_mobilenet_edgetpu_coco_quant.tar.gz) | 6.6 | -/24.3 | Boxes
### Pixel4 DSP models
-Model name | Pixel 4 DSP Latency (ms) | COCO mAP (fp32/uint8) | Outputs
------------------------------------------------------------------------------------------------------------------------------------ | :------------------: | :------: | :-----:
-[ssd_mobiledet_dsp_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobiledet_dsp_320x320_coco_2020_05_19.tar.gz) | 12.3 | 28.9/28.8 | Boxes
+
+Model name | Pixel 4 DSP Latency (ms) | COCO mAP (fp32/uint8) | Outputs
+------------------------------------------------------------------------------------------------------------------------------------- | :----------------------: | :-------------------: | :-----:
+[ssd_mobiledet_dsp_coco](http://download.tensorflow.org/models/object_detection/ssdlite_mobiledet_dsp_320x320_coco_2020_05_19.tar.gz) | 12.3 | 28.9/28.8 | Boxes
## Kitti-trained models
-Model name | Speed (ms) | Pascal mAP@0.5 | Outputs
------------------------------------------------------------------------------------------------------------------------------------------------------------------ | :---: | :-------------: | :-----:
-[faster_rcnn_resnet101_kitti](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_kitti_2018_01_28.tar.gz) | 79 | 87 | Boxes
+Model name | Speed (ms) | Pascal mAP@0.5 | Outputs
+----------------------------------------------------------------------------------------------------------------------------------- | :--------: | :------------: | :-----:
+[faster_rcnn_resnet101_kitti](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_kitti_2018_01_28.tar.gz) | 79 | 87 | Boxes
## Open Images-trained models
Model name | Speed (ms) | Open Images mAP@0.5[^2] | Outputs
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :--------: | :---------------------: | :-----:
-[faster_rcnn_inception_resnet_v2_atrous_oidv2](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_oid_2018_01_28.tar.gz) | 727 | 37 | Boxes
+[faster_rcnn_inception_resnet_v2_atrous_oidv2](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_oid_2018_01_28.tar.gz) | 727 | 37 | Boxes
[faster_rcnn_inception_resnet_v2_atrous_lowproposals_oidv2](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_lowproposals_oid_2018_01_28.tar.gz) | 347 | | Boxes
[facessd_mobilenet_v2_quantized_open_image_v4](http://download.tensorflow.org/models/object_detection/facessd_mobilenet_v2_quantized_320x320_open_image_v4.tar.gz) [^3] | 20 | 73 (faces) | Boxes
-Model name | Speed (ms) | Open Images mAP@0.5[^4] | Outputs
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :--------: | :---------------------: | :-----:
-[faster_rcnn_inception_resnet_v2_atrous_oidv4](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_oid_v4_2018_12_12.tar.gz) | 425 | 54 | Boxes
-[ssd_mobilenetv2_oidv4](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_oid_v4_2018_12_12.tar.gz) | 89 | 36 | Boxes
-[ssd_resnet_101_fpn_oidv4](http://download.tensorflow.org/models/object_detection/ssd_resnet101_v1_fpn_shared_box_predictor_oid_512x512_sync_2019_01_20.tar.gz) | 237 | 38 | Boxes
-## iNaturalist Species-trained models
+Model name | Speed (ms) | Open Images mAP@0.5[^4] | Outputs
+---------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :--------: | :---------------------: | :-----:
+[faster_rcnn_inception_resnet_v2_atrous_oidv4](http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_resnet_v2_atrous_oid_v4_2018_12_12.tar.gz) | 425 | 54 | Boxes
+[ssd_mobilenetv2_oidv4](http://download.tensorflow.org/models/object_detection/ssd_mobilenet_v2_oid_v4_2018_12_12.tar.gz) | 89 | 36 | Boxes
+[ssd_resnet_101_fpn_oidv4](http://download.tensorflow.org/models/object_detection/ssd_resnet101_v1_fpn_shared_box_predictor_oid_512x512_sync_2019_01_20.tar.gz) | 237 | 38 | Boxes
-Model name | Speed (ms) | Pascal mAP@0.5 | Outputs
------------------------------------------------------------------------------------------------------------------------------------------------------------------ | :---: | :-------------: | :-----:
-[faster_rcnn_resnet101_fgvc](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_fgvc_2018_07_19.tar.gz) | 395 | 58 | Boxes
-[faster_rcnn_resnet50_fgvc](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet50_fgvc_2018_07_19.tar.gz) | 366 | 55 | Boxes
+## iNaturalist Species-trained models
+Model name | Speed (ms) | Pascal mAP@0.5 | Outputs
+--------------------------------------------------------------------------------------------------------------------------------- | :--------: | :------------: | :-----:
+[faster_rcnn_resnet101_fgvc](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_fgvc_2018_07_19.tar.gz) | 395 | 58 | Boxes
+[faster_rcnn_resnet50_fgvc](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet50_fgvc_2018_07_19.tar.gz) | 366 | 55 | Boxes
## AVA v2.1 trained models
-Model name | Speed (ms) | Pascal mAP@0.5 | Outputs
------------------------------------------------------------------------------------------------------------------------------------------------------------------ | :---: | :-------------: | :-----:
-[faster_rcnn_resnet101_ava_v2.1](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_ava_v2.1_2018_04_30.tar.gz) | 93 | 11 | Boxes
-
-
-[^1]: See [MSCOCO evaluation protocol](http://cocodataset.org/#detections-eval). The COCO mAP numbers here are evaluated on COCO 14 minival set (note that our split is different from COCO 17 Val). A full list of image ids used in our split could be fould [here](https://github.com/tensorflow/models/blob/master/research/object_detection/data/mscoco_minival_ids.txt).
-
-
-[^2]: This is PASCAL mAP with a slightly different way of true positives computation: see [Open Images evaluation protocols](evaluation_protocols.md), oid_V2_detection_metrics.
-
-[^3]: Non-face boxes are dropped during training and non-face groundtruth boxes are ignored when evaluating.
-
-[^4]: This is Open Images Challenge metric: see [Open Images evaluation protocols](evaluation_protocols.md), oid_challenge_detection_metrics.
-
+Model name | Speed (ms) | Pascal mAP@0.5 | Outputs
+----------------------------------------------------------------------------------------------------------------------------------------- | :--------: | :------------: | :-----:
+[faster_rcnn_resnet101_ava_v2.1](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_ava_v2.1_2018_04_30.tar.gz) | 93 | 11 | Boxes
+
+## Snapshot Serengeti Camera Trap trained models
+
+Model name | COCO mAP@0.5 | Outputs
+--------------------------------------------------------------------------------------------------------------------------------------------------------------- | :----------: | :-----:
+[faster_rcnn_resnet101_snapshot_serengeti](http://download.tensorflow.org/models/object_detection/faster_rcnn_resnet101_snapshot_serengeti_2020_06_10.tar.gz) | 38 | Boxes
+[context_rcnn_resnet101_snapshot_serengeti](http://download.tensorflow.org/models/object_detection/context_rcnn_resnet101_snapshot_serengeti_2020_06_10.tar.gz) | 56 | Boxes
+
+[^1]: See [MSCOCO evaluation protocol](http://cocodataset.org/#detections-eval).
+    The COCO mAP numbers here are evaluated on the COCO 14 minival set (note
+    that our split is different from COCO 17 Val). A full list of image ids
+    used in our split can be found
+ [here](https://github.com/tensorflow/models/blob/master/research/object_detection/data/mscoco_minival_ids.txt).
+[^2]: This is PASCAL mAP with a slightly different way of computing true
+    positives: see
+ [Open Images evaluation protocols](evaluation_protocols.md),
+ oid_V2_detection_metrics.
+[^3]: Non-face boxes are dropped during training and non-face groundtruth boxes
+ are ignored when evaluating.
+[^4]: This is the Open Images Challenge metric: see
+ [Open Images evaluation protocols](evaluation_protocols.md),
+ oid_challenge_detection_metrics.
diff --git a/research/object_detection/inference/detection_inference_test.py b/research/object_detection/inference/detection_inference_tf1_test.py
similarity index 98%
rename from research/object_detection/inference/detection_inference_test.py
rename to research/object_detection/inference/detection_inference_tf1_test.py
index 6d35f2b688d4ccfc885b854251e27d2a47c24d4b..899da1298765425c667fbcdfd341fad713724d9f 100644
--- a/research/object_detection/inference/detection_inference_test.py
+++ b/research/object_detection/inference/detection_inference_tf1_test.py
@@ -15,7 +15,7 @@
r"""Tests for detection_inference.py."""
import os
-
+import unittest
import numpy as np
from PIL import Image
import six
@@ -25,6 +25,7 @@ from google.protobuf import text_format
from object_detection.core import standard_fields
from object_detection.inference import detection_inference
from object_detection.utils import dataset_util
+from object_detection.utils import tf_version
def get_mock_tfrecord_path():
@@ -74,6 +75,7 @@ def create_mock_graph():
fl.write(graph_def.SerializeToString())
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class InferDetectionsTests(tf.test.TestCase):
def test_simple(self):
diff --git a/research/object_detection/inputs.py b/research/object_detection/inputs.py
index 7512a56b15f9785b3868053e2a970bbd15801cb6..a3eb2f0bd2514a723442a568e124a579eb801794 100644
--- a/research/object_detection/inputs.py
+++ b/research/object_detection/inputs.py
@@ -64,7 +64,6 @@ def _multiclass_scores_or_one_hot_labels(multiclass_scores,
[tf.shape(groundtruth_boxes)[0], num_classes])
def false_fn():
return tf.one_hot(groundtruth_classes, num_classes)
-
return tf.cond(tf.size(multiclass_scores) > 0, true_fn, false_fn)
@@ -1006,14 +1005,21 @@ def get_reduce_to_frame_fn(input_reader_config, is_training):
`reduce_to_frame_fn` for the dataset builder
"""
if input_reader_config.input_type != (
- input_reader_pb2.InputType.TF_SEQUENCE_EXAMPLE):
- return lambda d: d
+ input_reader_pb2.InputType.Value('TF_SEQUENCE_EXAMPLE')):
+ return lambda dataset, dataset_map_fn, batch_size, config: dataset
else:
- def reduce_to_frame(dataset):
+ def reduce_to_frame(dataset, dataset_map_fn, batch_size,
+ input_reader_config):
"""Returns a function reducing sequence tensors to single frame tensors.
Args:
dataset: A tf dataset containing sequence tensors.
+        dataset_map_fn: A function that handles whether to
+          map_with_legacy_function for this dataset.
+        batch_size: Used if map_with_legacy_function is true to determine
+          num_parallel_calls.
+        input_reader_config: Used if map_with_legacy_function is true to
+          determine num_parallel_calls.
Returns:
A tf dataset containing single frame tensors.
@@ -1046,13 +1052,14 @@ def get_reduce_to_frame_fn(input_reader_config, is_training):
# Copy all context tensors.
out_tensor_dict[key] = tensor_dict[key]
return out_tensor_dict
- dataset = dataset.map(get_single_frame, tf.data.experimental.AUTOTUNE)
+ dataset = dataset_map_fn(dataset, get_single_frame, batch_size,
+ input_reader_config)
else:
- dataset = dataset.map(util_ops.tile_context_tensors,
- tf.data.experimental.AUTOTUNE)
+ dataset = dataset_map_fn(dataset, util_ops.tile_context_tensors,
+ batch_size, input_reader_config)
dataset = dataset.unbatch()
# Decode frame here as SequenceExample tensors contain encoded images.
- dataset = dataset.map(util_ops.decode_image,
- tf.data.experimental.AUTOTUNE)
+ dataset = dataset_map_fn(dataset, util_ops.decode_image, batch_size,
+ input_reader_config)
return dataset
return reduce_to_frame
diff --git a/research/object_detection/inputs_test.py b/research/object_detection/inputs_test.py
index 78e268b25d1c2fd6eab22e4384b0d2172a5ff8a7..1fca6538f071d11605ef1f83db24d184d3e6ab8d 100644
--- a/research/object_detection/inputs_test.py
+++ b/research/object_detection/inputs_test.py
@@ -20,10 +20,11 @@ from __future__ import print_function
import functools
import os
+import unittest
from absl import logging
from absl.testing import parameterized
-
import numpy as np
+import six
import tensorflow.compat.v1 as tf
from object_detection import inputs
@@ -31,6 +32,13 @@ from object_detection.core import preprocessor
from object_detection.core import standard_fields as fields
from object_detection.utils import config_util
from object_detection.utils import test_case
+from object_detection.utils import test_utils
+from object_detection.utils import tf_version
+
+if six.PY2:
+ import mock # pylint: disable=g-import-not-at-top
+else:
+ from unittest import mock # pylint: disable=g-import-not-at-top, g-importing-member
FLAGS = tf.flags.FLAGS
@@ -86,7 +94,8 @@ def _make_initializable_iterator(dataset):
return iterator
-class InputsTest(test_case.TestCase, parameterized.TestCase):
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only tests under TF2.X.')
+class InputFnTest(test_case.TestCase, parameterized.TestCase):
def test_faster_rcnn_resnet50_train_input(self):
"""Tests the training input function for FasterRcnnResnet50."""
@@ -402,7 +411,7 @@ class InputsTest(test_case.TestCase, parameterized.TestCase):
def test_ssd_inceptionV2_eval_input_with_additional_channels(
self, eval_batch_size=1):
- """Tests the eval input function for SSDInceptionV2 with additional channels.
+ """Tests the eval input function for SSDInceptionV2 with additional channel.
Args:
eval_batch_size: Batch size for eval set.
@@ -638,24 +647,20 @@ class DataAugmentationFnTest(test_case.TestCase):
data_augmentation_fn = functools.partial(
inputs.augment_input_data,
data_augmentation_options=data_augmentation_options)
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(np.random.rand(10, 10, 3).astype(np.float32)),
- fields.InputDataFields.groundtruth_boxes:
- tf.constant(np.array([[.5, .5, 1., 1.]], np.float32))
- }
- augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict)
- with self.test_session() as sess:
- augmented_tensor_dict_out = sess.run(augmented_tensor_dict)
-
- self.assertAllEqual(
- augmented_tensor_dict_out[fields.InputDataFields.image].shape,
- [20, 20, 3]
- )
- self.assertAllClose(
- augmented_tensor_dict_out[fields.InputDataFields.groundtruth_boxes],
- [[10, 10, 20, 20]]
- )
+ def graph_fn():
+ tensor_dict = {
+ fields.InputDataFields.image:
+ tf.constant(np.random.rand(10, 10, 3).astype(np.float32)),
+ fields.InputDataFields.groundtruth_boxes:
+ tf.constant(np.array([[.5, .5, 1., 1.]], np.float32))
+ }
+ augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict)
+ return (augmented_tensor_dict[fields.InputDataFields.image],
+ augmented_tensor_dict[fields.InputDataFields.
+ groundtruth_boxes])
+ image, groundtruth_boxes = self.execute_cpu(graph_fn, [])
+ self.assertAllEqual(image.shape, [20, 20, 3])
+ self.assertAllClose(groundtruth_boxes, [[10, 10, 20, 20]])
def test_apply_image_and_box_augmentation_with_scores(self):
data_augmentation_options = [
@@ -669,37 +674,28 @@ class DataAugmentationFnTest(test_case.TestCase):
data_augmentation_fn = functools.partial(
inputs.augment_input_data,
data_augmentation_options=data_augmentation_options)
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(np.random.rand(10, 10, 3).astype(np.float32)),
- fields.InputDataFields.groundtruth_boxes:
- tf.constant(np.array([[.5, .5, 1., 1.]], np.float32)),
- fields.InputDataFields.groundtruth_classes:
- tf.constant(np.array([1.0], np.float32)),
- fields.InputDataFields.groundtruth_weights:
- tf.constant(np.array([0.8], np.float32)),
- }
- augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict)
- with self.test_session() as sess:
- augmented_tensor_dict_out = sess.run(augmented_tensor_dict)
-
- self.assertAllEqual(
- augmented_tensor_dict_out[fields.InputDataFields.image].shape,
- [20, 20, 3]
- )
- self.assertAllClose(
- augmented_tensor_dict_out[fields.InputDataFields.groundtruth_boxes],
- [[10, 10, 20, 20]]
- )
- self.assertAllClose(
- augmented_tensor_dict_out[fields.InputDataFields.groundtruth_classes],
- [1.0]
- )
- self.assertAllClose(
- augmented_tensor_dict_out[
- fields.InputDataFields.groundtruth_weights],
- [0.8]
- )
+ def graph_fn():
+ tensor_dict = {
+ fields.InputDataFields.image:
+ tf.constant(np.random.rand(10, 10, 3).astype(np.float32)),
+ fields.InputDataFields.groundtruth_boxes:
+ tf.constant(np.array([[.5, .5, 1., 1.]], np.float32)),
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant(np.array([1.0], np.float32)),
+ fields.InputDataFields.groundtruth_weights:
+ tf.constant(np.array([0.8], np.float32)),
+ }
+ augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict)
+ return (augmented_tensor_dict[fields.InputDataFields.image],
+ augmented_tensor_dict[fields.InputDataFields.groundtruth_boxes],
+ augmented_tensor_dict[fields.InputDataFields.groundtruth_classes],
+ augmented_tensor_dict[fields.InputDataFields.groundtruth_weights])
+ (image, groundtruth_boxes,
+ groundtruth_classes, groundtruth_weights) = self.execute_cpu(graph_fn, [])
+ self.assertAllEqual(image.shape, [20, 20, 3])
+ self.assertAllClose(groundtruth_boxes, [[10, 10, 20, 20]])
+    self.assertAllClose(groundtruth_classes, [1.0])
+ self.assertAllClose(groundtruth_weights, [0.8])
def test_include_masks_in_data_augmentation(self):
data_augmentation_options = [
@@ -712,21 +708,20 @@ class DataAugmentationFnTest(test_case.TestCase):
data_augmentation_fn = functools.partial(
inputs.augment_input_data,
data_augmentation_options=data_augmentation_options)
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(np.random.rand(10, 10, 3).astype(np.float32)),
- fields.InputDataFields.groundtruth_instance_masks:
- tf.constant(np.zeros([2, 10, 10], np.uint8))
- }
- augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict)
- with self.test_session() as sess:
- augmented_tensor_dict_out = sess.run(augmented_tensor_dict)
-
- self.assertAllEqual(
- augmented_tensor_dict_out[fields.InputDataFields.image].shape,
- [20, 20, 3])
- self.assertAllEqual(augmented_tensor_dict_out[
- fields.InputDataFields.groundtruth_instance_masks].shape, [2, 20, 20])
+ def graph_fn():
+ tensor_dict = {
+ fields.InputDataFields.image:
+ tf.constant(np.random.rand(10, 10, 3).astype(np.float32)),
+ fields.InputDataFields.groundtruth_instance_masks:
+ tf.constant(np.zeros([2, 10, 10], np.uint8))
+ }
+ augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict)
+ return (augmented_tensor_dict[fields.InputDataFields.image],
+ augmented_tensor_dict[fields.InputDataFields.
+ groundtruth_instance_masks])
+ image, masks = self.execute_cpu(graph_fn, [])
+ self.assertAllEqual(image.shape, [20, 20, 3])
+ self.assertAllEqual(masks.shape, [2, 20, 20])
def test_include_keypoints_in_data_augmentation(self):
data_augmentation_options = [
@@ -740,30 +735,24 @@ class DataAugmentationFnTest(test_case.TestCase):
data_augmentation_fn = functools.partial(
inputs.augment_input_data,
data_augmentation_options=data_augmentation_options)
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(np.random.rand(10, 10, 3).astype(np.float32)),
- fields.InputDataFields.groundtruth_boxes:
- tf.constant(np.array([[.5, .5, 1., 1.]], np.float32)),
- fields.InputDataFields.groundtruth_keypoints:
- tf.constant(np.array([[[0.5, 1.0], [0.5, 0.5]]], np.float32))
- }
- augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict)
- with self.test_session() as sess:
- augmented_tensor_dict_out = sess.run(augmented_tensor_dict)
-
- self.assertAllEqual(
- augmented_tensor_dict_out[fields.InputDataFields.image].shape,
- [20, 20, 3]
- )
- self.assertAllClose(
- augmented_tensor_dict_out[fields.InputDataFields.groundtruth_boxes],
- [[10, 10, 20, 20]]
- )
- self.assertAllClose(
- augmented_tensor_dict_out[fields.InputDataFields.groundtruth_keypoints],
- [[[10, 20], [10, 10]]]
- )
+ def graph_fn():
+ tensor_dict = {
+ fields.InputDataFields.image:
+ tf.constant(np.random.rand(10, 10, 3).astype(np.float32)),
+ fields.InputDataFields.groundtruth_boxes:
+ tf.constant(np.array([[.5, .5, 1., 1.]], np.float32)),
+ fields.InputDataFields.groundtruth_keypoints:
+ tf.constant(np.array([[[0.5, 1.0], [0.5, 0.5]]], np.float32))
+ }
+ augmented_tensor_dict = data_augmentation_fn(tensor_dict=tensor_dict)
+ return (augmented_tensor_dict[fields.InputDataFields.image],
+ augmented_tensor_dict[fields.InputDataFields.groundtruth_boxes],
+ augmented_tensor_dict[fields.InputDataFields.
+ groundtruth_keypoints])
+ image, boxes, keypoints = self.execute_cpu(graph_fn, [])
+ self.assertAllEqual(image.shape, [20, 20, 3])
+ self.assertAllClose(boxes, [[10, 10, 20, 20]])
+ self.assertAllClose(keypoints, [[[10, 20], [10, 10]]])
def _fake_model_preprocessor_fn(image):
@@ -787,85 +776,82 @@ class DataTransformationFnTest(test_case.TestCase, parameterized.TestCase):
def test_combine_additional_channels_if_present(self):
image = np.random.rand(4, 4, 3).astype(np.float32)
additional_channels = np.random.rand(4, 4, 2).astype(np.float32)
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(image),
- fields.InputDataFields.image_additional_channels:
- tf.constant(additional_channels),
- fields.InputDataFields.groundtruth_classes:
- tf.constant(np.array([1, 1], np.int32))
- }
+ def graph_fn(image, additional_channels):
+ tensor_dict = {
+ fields.InputDataFields.image: image,
+ fields.InputDataFields.image_additional_channels: additional_channels,
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant([1, 1], tf.int32)
+ }
- input_transformation_fn = functools.partial(
- inputs.transform_input_data,
- model_preprocess_fn=_fake_model_preprocessor_fn,
- image_resizer_fn=_fake_image_resizer_fn,
- num_classes=1)
- with self.test_session() as sess:
- transformed_inputs = sess.run(
- input_transformation_fn(tensor_dict=tensor_dict))
- self.assertAllEqual(transformed_inputs[fields.InputDataFields.image].dtype,
- tf.float32)
- self.assertAllEqual(transformed_inputs[fields.InputDataFields.image].shape,
- [4, 4, 5])
- self.assertAllClose(transformed_inputs[fields.InputDataFields.image],
- np.concatenate((image, additional_channels), axis=2))
+ input_transformation_fn = functools.partial(
+ inputs.transform_input_data,
+ model_preprocess_fn=_fake_model_preprocessor_fn,
+ image_resizer_fn=_fake_image_resizer_fn,
+ num_classes=1)
+ out_tensors = input_transformation_fn(tensor_dict=tensor_dict)
+ return out_tensors[fields.InputDataFields.image]
+ out_image = self.execute_cpu(graph_fn, [image, additional_channels])
+ self.assertAllEqual(out_image.dtype, tf.float32)
+ self.assertAllEqual(out_image.shape, [4, 4, 5])
+ self.assertAllClose(out_image, np.concatenate((image, additional_channels),
+ axis=2))
def test_use_multiclass_scores_when_present(self):
- image = np.random.rand(4, 4, 3).astype(np.float32)
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(image),
- fields.InputDataFields.groundtruth_boxes:
- tf.constant(np.array([[.5, .5, 1, 1], [.5, .5, 1, 1]], np.float32)),
- fields.InputDataFields.multiclass_scores:
- tf.constant(np.array([0.2, 0.3, 0.5, 0.1, 0.6, 0.3], np.float32)),
- fields.InputDataFields.groundtruth_classes:
- tf.constant(np.array([1, 2], np.int32))
- }
-
- input_transformation_fn = functools.partial(
- inputs.transform_input_data,
- model_preprocess_fn=_fake_model_preprocessor_fn,
- image_resizer_fn=_fake_image_resizer_fn,
- num_classes=3, use_multiclass_scores=True)
- with self.test_session() as sess:
- transformed_inputs = sess.run(
- input_transformation_fn(tensor_dict=tensor_dict))
+ def graph_fn():
+ tensor_dict = {
+ fields.InputDataFields.image: tf.constant(np.random.rand(4, 4, 3).
+ astype(np.float32)),
+ fields.InputDataFields.groundtruth_boxes:
+ tf.constant(np.array([[.5, .5, 1, 1], [.5, .5, 1, 1]],
+ np.float32)),
+ fields.InputDataFields.multiclass_scores:
+ tf.constant(np.array([0.2, 0.3, 0.5, 0.1, 0.6, 0.3], np.float32)),
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant(np.array([1, 2], np.int32))
+ }
+ input_transformation_fn = functools.partial(
+ inputs.transform_input_data,
+ model_preprocess_fn=_fake_model_preprocessor_fn,
+ image_resizer_fn=_fake_image_resizer_fn,
+ num_classes=3, use_multiclass_scores=True)
+ transformed_inputs = input_transformation_fn(tensor_dict=tensor_dict)
+ return transformed_inputs[fields.InputDataFields.groundtruth_classes]
+ groundtruth_classes = self.execute_cpu(graph_fn, [])
self.assertAllClose(
np.array([[0.2, 0.3, 0.5], [0.1, 0.6, 0.3]], np.float32),
- transformed_inputs[fields.InputDataFields.groundtruth_classes])
+ groundtruth_classes)
+ @unittest.skipIf(tf_version.is_tf2(), ('Skipping due to different behaviour '
+ 'in TF 2.X'))
def test_use_multiclass_scores_when_not_present(self):
- image = np.random.rand(4, 4, 3).astype(np.float32)
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(image),
- fields.InputDataFields.groundtruth_boxes:
- tf.constant(np.array([[.5, .5, 1, 1], [.5, .5, 1, 1]], np.float32)),
- fields.InputDataFields.multiclass_scores:
- tf.placeholder(tf.float32),
- fields.InputDataFields.groundtruth_classes:
- tf.constant(np.array([1, 2], np.int32))
- }
+ def graph_fn():
+ zero_num_elements = tf.random.uniform([], minval=0, maxval=1,
+ dtype=tf.int32)
+ tensor_dict = {
+ fields.InputDataFields.image:
+ tf.constant(np.random.rand(4, 4, 3).astype(np.float32)),
+ fields.InputDataFields.groundtruth_boxes:
+ tf.constant(np.array([[.5, .5, 1, 1], [.5, .5, 1, 1]],
+ np.float32)),
+ fields.InputDataFields.multiclass_scores: tf.zeros(zero_num_elements),
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant(np.array([1, 2], np.int32))
+ }
- input_transformation_fn = functools.partial(
- inputs.transform_input_data,
- model_preprocess_fn=_fake_model_preprocessor_fn,
- image_resizer_fn=_fake_image_resizer_fn,
- num_classes=3, use_multiclass_scores=True)
- with self.test_session() as sess:
- transformed_inputs = sess.run(
- input_transformation_fn(tensor_dict=tensor_dict),
- feed_dict={
- tensor_dict[fields.InputDataFields.multiclass_scores]:
- np.array([], dtype=np.float32)
- })
+ input_transformation_fn = functools.partial(
+ inputs.transform_input_data,
+ model_preprocess_fn=_fake_model_preprocessor_fn,
+ image_resizer_fn=_fake_image_resizer_fn,
+ num_classes=3, use_multiclass_scores=True)
+ transformed_inputs = input_transformation_fn(tensor_dict=tensor_dict)
+ return transformed_inputs[fields.InputDataFields.groundtruth_classes]
+ groundtruth_classes = self.execute_cpu(graph_fn, [])
self.assertAllClose(
np.array([[0, 1, 0], [0, 0, 1]], np.float32),
- transformed_inputs[fields.InputDataFields.groundtruth_classes])
+ groundtruth_classes)
@parameterized.parameters(
{'labeled_classes': [1, 2]},
@@ -916,385 +902,395 @@ class DataTransformationFnTest(test_case.TestCase, parameterized.TestCase):
transformed_inputs[fields.InputDataFields.groundtruth_labeled_classes])
def test_returns_correct_class_label_encodings(self):
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(np.random.rand(4, 4, 3).astype(np.float32)),
- fields.InputDataFields.groundtruth_boxes:
- tf.constant(np.array([[0, 0, 1, 1], [.5, .5, 1, 1]], np.float32)),
- fields.InputDataFields.groundtruth_classes:
- tf.constant(np.array([3, 1], np.int32))
- }
- num_classes = 3
- input_transformation_fn = functools.partial(
- inputs.transform_input_data,
- model_preprocess_fn=_fake_model_preprocessor_fn,
- image_resizer_fn=_fake_image_resizer_fn,
- num_classes=num_classes)
- with self.test_session() as sess:
- transformed_inputs = sess.run(
- input_transformation_fn(tensor_dict=tensor_dict))
-
- self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_classes],
- [[0, 0, 1], [1, 0, 0]])
- self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_confidences],
- [[0, 0, 1], [1, 0, 0]])
+ def graph_fn():
+ tensor_dict = {
+ fields.InputDataFields.image:
+ tf.constant(np.random.rand(4, 4, 3).astype(np.float32)),
+ fields.InputDataFields.groundtruth_boxes:
+ tf.constant(np.array([[0, 0, 1, 1], [.5, .5, 1, 1]], np.float32)),
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant(np.array([3, 1], np.int32))
+ }
+ num_classes = 3
+ input_transformation_fn = functools.partial(
+ inputs.transform_input_data,
+ model_preprocess_fn=_fake_model_preprocessor_fn,
+ image_resizer_fn=_fake_image_resizer_fn,
+ num_classes=num_classes)
+ transformed_inputs = input_transformation_fn(tensor_dict=tensor_dict)
+ return (transformed_inputs[fields.InputDataFields.groundtruth_classes],
+ transformed_inputs[fields.InputDataFields.
+ groundtruth_confidences])
+ (groundtruth_classes, groundtruth_confidences) = self.execute_cpu(graph_fn,
+ [])
+ self.assertAllClose(groundtruth_classes, [[0, 0, 1], [1, 0, 0]])
+ self.assertAllClose(groundtruth_confidences, [[0, 0, 1], [1, 0, 0]])
def test_returns_correct_labels_with_unrecognized_class(self):
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(np.random.rand(4, 4, 3).astype(np.float32)),
- fields.InputDataFields.groundtruth_boxes:
- tf.constant(
- np.array([[0, 0, 1, 1], [.2, .2, 4, 4], [.5, .5, 1, 1]],
- np.float32)),
- fields.InputDataFields.groundtruth_area:
- tf.constant(np.array([.5, .4, .3])),
- fields.InputDataFields.groundtruth_classes:
- tf.constant(np.array([3, -1, 1], np.int32)),
- fields.InputDataFields.groundtruth_keypoints:
- tf.constant(
- np.array([[[.1, .1]], [[.2, .2]], [[.5, .5]]],
- np.float32)),
- fields.InputDataFields.groundtruth_keypoint_visibilities:
- tf.constant([[True, True], [False, False], [True, True]]),
- fields.InputDataFields.groundtruth_instance_masks:
- tf.constant(np.random.rand(3, 4, 4).astype(np.float32)),
- fields.InputDataFields.groundtruth_is_crowd:
- tf.constant([False, True, False]),
- fields.InputDataFields.groundtruth_difficult:
- tf.constant(np.array([0, 0, 1], np.int32))
- }
-
- num_classes = 3
- input_transformation_fn = functools.partial(
- inputs.transform_input_data,
- model_preprocess_fn=_fake_model_preprocessor_fn,
- image_resizer_fn=_fake_image_resizer_fn,
- num_classes=num_classes)
- with self.test_session() as sess:
- transformed_inputs = sess.run(
- input_transformation_fn(tensor_dict=tensor_dict))
+ def graph_fn():
+ tensor_dict = {
+ fields.InputDataFields.image:
+ tf.constant(np.random.rand(4, 4, 3).astype(np.float32)),
+ fields.InputDataFields.groundtruth_boxes:
+ tf.constant(
+ np.array([[0, 0, 1, 1], [.2, .2, 4, 4], [.5, .5, 1, 1]],
+ np.float32)),
+ fields.InputDataFields.groundtruth_area:
+ tf.constant(np.array([.5, .4, .3])),
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant(np.array([3, -1, 1], np.int32)),
+ fields.InputDataFields.groundtruth_keypoints:
+ tf.constant(
+ np.array([[[.1, .1]], [[.2, .2]], [[.5, .5]]],
+ np.float32)),
+ fields.InputDataFields.groundtruth_keypoint_visibilities:
+ tf.constant([[True, True], [False, False], [True, True]]),
+ fields.InputDataFields.groundtruth_instance_masks:
+ tf.constant(np.random.rand(3, 4, 4).astype(np.float32)),
+ fields.InputDataFields.groundtruth_is_crowd:
+ tf.constant([False, True, False]),
+ fields.InputDataFields.groundtruth_difficult:
+ tf.constant(np.array([0, 0, 1], np.int32))
+ }
- self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_classes],
- [[0, 0, 1], [1, 0, 0]])
- self.assertAllEqual(
- transformed_inputs[fields.InputDataFields.num_groundtruth_boxes], 2)
- self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_area], [.5, .3])
- self.assertAllEqual(
- transformed_inputs[fields.InputDataFields.groundtruth_confidences],
- [[0, 0, 1], [1, 0, 0]])
- self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_boxes],
- [[0, 0, 1, 1], [.5, .5, 1, 1]])
- self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_keypoints],
- [[[.1, .1]], [[.5, .5]]])
- self.assertAllEqual(
- transformed_inputs[
- fields.InputDataFields.groundtruth_keypoint_visibilities],
- [[True, True], [True, True]])
- self.assertAllEqual(
- transformed_inputs[
- fields.InputDataFields.groundtruth_instance_masks].shape, [2, 4, 4])
- self.assertAllEqual(
- transformed_inputs[fields.InputDataFields.groundtruth_is_crowd],
- [False, False])
- self.assertAllEqual(
- transformed_inputs[fields.InputDataFields.groundtruth_difficult],
- [0, 1])
+ num_classes = 3
+ input_transformation_fn = functools.partial(
+ inputs.transform_input_data,
+ model_preprocess_fn=_fake_model_preprocessor_fn,
+ image_resizer_fn=_fake_image_resizer_fn,
+ num_classes=num_classes)
+ transformed_inputs = input_transformation_fn(tensor_dict)
+ return (transformed_inputs[fields.InputDataFields.groundtruth_classes],
+ transformed_inputs[fields.InputDataFields.num_groundtruth_boxes],
+ transformed_inputs[fields.InputDataFields.groundtruth_area],
+ transformed_inputs[fields.InputDataFields.
+ groundtruth_confidences],
+ transformed_inputs[fields.InputDataFields.groundtruth_boxes],
+ transformed_inputs[fields.InputDataFields.groundtruth_keypoints],
+ transformed_inputs[fields.InputDataFields.
+ groundtruth_keypoint_visibilities],
+ transformed_inputs[fields.InputDataFields.
+ groundtruth_instance_masks],
+ transformed_inputs[fields.InputDataFields.groundtruth_is_crowd],
+ transformed_inputs[fields.InputDataFields.groundtruth_difficult])
+ (groundtruth_classes, num_groundtruth_boxes, groundtruth_area,
+ groundtruth_confidences, groundtruth_boxes, groundtruth_keypoints,
+ groundtruth_keypoint_visibilities, groundtruth_instance_masks,
+ groundtruth_is_crowd, groundtruth_difficult) = self.execute_cpu(graph_fn,
+ [])
+
+ self.assertAllClose(groundtruth_classes, [[0, 0, 1], [1, 0, 0]])
+ self.assertAllEqual(num_groundtruth_boxes, 2)
+ self.assertAllClose(groundtruth_area, [.5, .3])
+ self.assertAllEqual(groundtruth_confidences, [[0, 0, 1], [1, 0, 0]])
+ self.assertAllClose(groundtruth_boxes, [[0, 0, 1, 1], [.5, .5, 1, 1]])
+ self.assertAllClose(groundtruth_keypoints, [[[.1, .1]], [[.5, .5]]])
+ self.assertAllEqual(groundtruth_keypoint_visibilities,
+ [[True, True], [True, True]])
+ self.assertAllEqual(groundtruth_instance_masks.shape, [2, 4, 4])
+ self.assertAllEqual(groundtruth_is_crowd, [False, False])
+ self.assertAllEqual(groundtruth_difficult, [0, 1])
def test_returns_correct_merged_boxes(self):
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(np.random.rand(4, 4, 3).astype(np.float32)),
- fields.InputDataFields.groundtruth_boxes:
- tf.constant(np.array([[.5, .5, 1, 1], [.5, .5, 1, 1]], np.float32)),
- fields.InputDataFields.groundtruth_classes:
- tf.constant(np.array([3, 1], np.int32))
- }
-
- num_classes = 3
- input_transformation_fn = functools.partial(
- inputs.transform_input_data,
- model_preprocess_fn=_fake_model_preprocessor_fn,
- image_resizer_fn=_fake_image_resizer_fn,
- num_classes=num_classes,
- merge_multiple_boxes=True)
+ def graph_fn():
+ tensor_dict = {
+ fields.InputDataFields.image:
+ tf.constant(np.random.rand(4, 4, 3).astype(np.float32)),
+ fields.InputDataFields.groundtruth_boxes:
+ tf.constant(np.array([[.5, .5, 1, 1], [.5, .5, 1, 1]],
+ np.float32)),
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant(np.array([3, 1], np.int32))
+ }
- with self.test_session() as sess:
- transformed_inputs = sess.run(
- input_transformation_fn(tensor_dict=tensor_dict))
+ num_classes = 3
+ input_transformation_fn = functools.partial(
+ inputs.transform_input_data,
+ model_preprocess_fn=_fake_model_preprocessor_fn,
+ image_resizer_fn=_fake_image_resizer_fn,
+ num_classes=num_classes,
+ merge_multiple_boxes=True)
+ transformed_inputs = input_transformation_fn(tensor_dict)
+ return (transformed_inputs[fields.InputDataFields.groundtruth_boxes],
+ transformed_inputs[fields.InputDataFields.groundtruth_classes],
+ transformed_inputs[fields.InputDataFields.
+ groundtruth_confidences],
+ transformed_inputs[fields.InputDataFields.num_groundtruth_boxes])
+ (groundtruth_boxes, groundtruth_classes, groundtruth_confidences,
+ num_groundtruth_boxes) = self.execute_cpu(graph_fn, [])
self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_boxes],
+ groundtruth_boxes,
[[.5, .5, 1., 1.]])
self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_classes],
+ groundtruth_classes,
[[1, 0, 1]])
self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_confidences],
+ groundtruth_confidences,
[[1, 0, 1]])
self.assertAllClose(
- transformed_inputs[fields.InputDataFields.num_groundtruth_boxes],
+ num_groundtruth_boxes,
1)
def test_returns_correct_groundtruth_confidences_when_input_present(self):
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(np.random.rand(4, 4, 3).astype(np.float32)),
- fields.InputDataFields.groundtruth_boxes:
- tf.constant(np.array([[0, 0, 1, 1], [.5, .5, 1, 1]], np.float32)),
- fields.InputDataFields.groundtruth_classes:
- tf.constant(np.array([3, 1], np.int32)),
- fields.InputDataFields.groundtruth_confidences:
- tf.constant(np.array([1.0, -1.0], np.float32))
- }
- num_classes = 3
- input_transformation_fn = functools.partial(
- inputs.transform_input_data,
- model_preprocess_fn=_fake_model_preprocessor_fn,
- image_resizer_fn=_fake_image_resizer_fn,
- num_classes=num_classes)
- with self.test_session() as sess:
- transformed_inputs = sess.run(
- input_transformation_fn(tensor_dict=tensor_dict))
-
+ def graph_fn():
+ tensor_dict = {
+ fields.InputDataFields.image:
+ tf.constant(np.random.rand(4, 4, 3).astype(np.float32)),
+ fields.InputDataFields.groundtruth_boxes:
+ tf.constant(np.array([[0, 0, 1, 1], [.5, .5, 1, 1]], np.float32)),
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant(np.array([3, 1], np.int32)),
+ fields.InputDataFields.groundtruth_confidences:
+ tf.constant(np.array([1.0, -1.0], np.float32))
+ }
+ num_classes = 3
+ input_transformation_fn = functools.partial(
+ inputs.transform_input_data,
+ model_preprocess_fn=_fake_model_preprocessor_fn,
+ image_resizer_fn=_fake_image_resizer_fn,
+ num_classes=num_classes)
+ transformed_inputs = input_transformation_fn(tensor_dict)
+ return (transformed_inputs[fields.InputDataFields.groundtruth_classes],
+ transformed_inputs[fields.InputDataFields.
+ groundtruth_confidences])
+ groundtruth_classes, groundtruth_confidences = self.execute_cpu(graph_fn,
+ [])
self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_classes],
+ groundtruth_classes,
[[0, 0, 1], [1, 0, 0]])
self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_confidences],
+ groundtruth_confidences,
[[0, 0, 1], [-1, 0, 0]])
def test_returns_resized_masks(self):
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(np.random.rand(4, 4, 3).astype(np.float32)),
- fields.InputDataFields.groundtruth_instance_masks:
- tf.constant(np.random.rand(2, 4, 4).astype(np.float32)),
- fields.InputDataFields.groundtruth_classes:
- tf.constant(np.array([3, 1], np.int32)),
- fields.InputDataFields.original_image_spatial_shape:
- tf.constant(np.array([4, 4], np.int32))
- }
+ def graph_fn():
+ tensor_dict = {
+ fields.InputDataFields.image:
+ tf.constant(np.random.rand(4, 4, 3).astype(np.float32)),
+ fields.InputDataFields.groundtruth_instance_masks:
+ tf.constant(np.random.rand(2, 4, 4).astype(np.float32)),
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant(np.array([3, 1], np.int32)),
+ fields.InputDataFields.original_image_spatial_shape:
+ tf.constant(np.array([4, 4], np.int32))
+ }
- def fake_image_resizer_fn(image, masks=None):
- resized_image = tf.image.resize_images(image, [8, 8])
- results = [resized_image]
- if masks is not None:
- resized_masks = tf.transpose(
- tf.image.resize_images(tf.transpose(masks, [1, 2, 0]), [8, 8]),
- [2, 0, 1])
- results.append(resized_masks)
- results.append(tf.shape(resized_image))
- return results
-
- num_classes = 3
- input_transformation_fn = functools.partial(
- inputs.transform_input_data,
- model_preprocess_fn=_fake_model_preprocessor_fn,
- image_resizer_fn=fake_image_resizer_fn,
- num_classes=num_classes,
- retain_original_image=True)
- with self.test_session() as sess:
- transformed_inputs = sess.run(
- input_transformation_fn(tensor_dict=tensor_dict))
- self.assertAllEqual(transformed_inputs[
- fields.InputDataFields.original_image].dtype, tf.uint8)
- self.assertAllEqual(transformed_inputs[
- fields.InputDataFields.original_image_spatial_shape], [4, 4])
- self.assertAllEqual(transformed_inputs[
- fields.InputDataFields.original_image].shape, [8, 8, 3])
- self.assertAllEqual(transformed_inputs[
- fields.InputDataFields.groundtruth_instance_masks].shape, [2, 8, 8])
+ def fake_image_resizer_fn(image, masks=None):
+ resized_image = tf.image.resize_images(image, [8, 8])
+ results = [resized_image]
+ if masks is not None:
+ resized_masks = tf.transpose(
+ tf.image.resize_images(tf.transpose(masks, [1, 2, 0]), [8, 8]),
+ [2, 0, 1])
+ results.append(resized_masks)
+ results.append(tf.shape(resized_image))
+ return results
+
+ num_classes = 3
+ input_transformation_fn = functools.partial(
+ inputs.transform_input_data,
+ model_preprocess_fn=_fake_model_preprocessor_fn,
+ image_resizer_fn=fake_image_resizer_fn,
+ num_classes=num_classes,
+ retain_original_image=True)
+ transformed_inputs = input_transformation_fn(tensor_dict)
+ return (transformed_inputs[fields.InputDataFields.original_image],
+ transformed_inputs[fields.InputDataFields.
+ original_image_spatial_shape],
+ transformed_inputs[fields.InputDataFields.
+ groundtruth_instance_masks])
+ (original_image, original_image_shape,
+ groundtruth_instance_masks) = self.execute_cpu(graph_fn, [])
+ self.assertEqual(original_image.dtype, np.uint8)
+ self.assertAllEqual(original_image_shape, [4, 4])
+ self.assertAllEqual(original_image.shape, [8, 8, 3])
+ self.assertAllEqual(groundtruth_instance_masks.shape, [2, 8, 8])
def test_applies_model_preprocess_fn_to_image_tensor(self):
np_image = np.random.randint(256, size=(4, 4, 3))
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(np_image),
- fields.InputDataFields.groundtruth_classes:
- tf.constant(np.array([3, 1], np.int32))
- }
-
- def fake_model_preprocessor_fn(image):
- return (image / 255., tf.expand_dims(tf.shape(image)[1:], axis=0))
+ def graph_fn(image):
+ tensor_dict = {
+ fields.InputDataFields.image: image,
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant(np.array([3, 1], np.int32))
+ }
- num_classes = 3
- input_transformation_fn = functools.partial(
- inputs.transform_input_data,
- model_preprocess_fn=fake_model_preprocessor_fn,
- image_resizer_fn=_fake_image_resizer_fn,
- num_classes=num_classes)
+ def fake_model_preprocessor_fn(image):
+ return (image / 255., tf.expand_dims(tf.shape(image)[1:], axis=0))
- with self.test_session() as sess:
- transformed_inputs = sess.run(
- input_transformation_fn(tensor_dict=tensor_dict))
- self.assertAllClose(transformed_inputs[fields.InputDataFields.image],
- np_image / 255.)
- self.assertAllClose(transformed_inputs[fields.InputDataFields.
- true_image_shape],
- [4, 4, 3])
+ num_classes = 3
+ input_transformation_fn = functools.partial(
+ inputs.transform_input_data,
+ model_preprocess_fn=fake_model_preprocessor_fn,
+ image_resizer_fn=_fake_image_resizer_fn,
+ num_classes=num_classes)
+ transformed_inputs = input_transformation_fn(tensor_dict)
+ return (transformed_inputs[fields.InputDataFields.image],
+ transformed_inputs[fields.InputDataFields.true_image_shape])
+ image, true_image_shape = self.execute_cpu(graph_fn, [np_image])
+ self.assertAllClose(image, np_image / 255.)
+ self.assertAllClose(true_image_shape, [4, 4, 3])
def test_applies_data_augmentation_fn_to_tensor_dict(self):
np_image = np.random.randint(256, size=(4, 4, 3))
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(np_image),
- fields.InputDataFields.groundtruth_classes:
- tf.constant(np.array([3, 1], np.int32))
- }
-
- def add_one_data_augmentation_fn(tensor_dict):
- return {key: value + 1 for key, value in tensor_dict.items()}
+ def graph_fn(image):
+ tensor_dict = {
+ fields.InputDataFields.image: image,
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant(np.array([3, 1], np.int32))
+ }
- num_classes = 4
- input_transformation_fn = functools.partial(
- inputs.transform_input_data,
- model_preprocess_fn=_fake_model_preprocessor_fn,
- image_resizer_fn=_fake_image_resizer_fn,
- num_classes=num_classes,
- data_augmentation_fn=add_one_data_augmentation_fn)
- with self.test_session() as sess:
- augmented_tensor_dict = sess.run(
- input_transformation_fn(tensor_dict=tensor_dict))
+ def add_one_data_augmentation_fn(tensor_dict):
+ return {key: value + 1 for key, value in tensor_dict.items()}
- self.assertAllEqual(augmented_tensor_dict[fields.InputDataFields.image],
- np_image + 1)
- self.assertAllEqual(
- augmented_tensor_dict[fields.InputDataFields.groundtruth_classes],
+ num_classes = 4
+ input_transformation_fn = functools.partial(
+ inputs.transform_input_data,
+ model_preprocess_fn=_fake_model_preprocessor_fn,
+ image_resizer_fn=_fake_image_resizer_fn,
+ num_classes=num_classes,
+ data_augmentation_fn=add_one_data_augmentation_fn)
+ transformed_inputs = input_transformation_fn(tensor_dict)
+ return (transformed_inputs[fields.InputDataFields.image],
+ transformed_inputs[fields.InputDataFields.groundtruth_classes])
+ image, groundtruth_classes = self.execute_cpu(graph_fn, [np_image])
+ self.assertAllEqual(image, np_image + 1)
+ self.assertAllEqual(
+ groundtruth_classes,
[[0, 0, 0, 1], [0, 1, 0, 0]])
def test_applies_data_augmentation_fn_before_model_preprocess_fn(self):
np_image = np.random.randint(256, size=(4, 4, 3))
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(np_image),
- fields.InputDataFields.groundtruth_classes:
- tf.constant(np.array([3, 1], np.int32))
- }
-
- def mul_two_model_preprocessor_fn(image):
- return (image * 2, tf.expand_dims(tf.shape(image)[1:], axis=0))
+ def graph_fn(image):
+ tensor_dict = {
+ fields.InputDataFields.image: image,
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant(np.array([3, 1], np.int32))
+ }
- def add_five_to_image_data_augmentation_fn(tensor_dict):
- tensor_dict[fields.InputDataFields.image] += 5
- return tensor_dict
+ def mul_two_model_preprocessor_fn(image):
+ return (image * 2, tf.expand_dims(tf.shape(image)[1:], axis=0))
- num_classes = 4
- input_transformation_fn = functools.partial(
- inputs.transform_input_data,
- model_preprocess_fn=mul_two_model_preprocessor_fn,
- image_resizer_fn=_fake_image_resizer_fn,
- num_classes=num_classes,
- data_augmentation_fn=add_five_to_image_data_augmentation_fn)
- with self.test_session() as sess:
- augmented_tensor_dict = sess.run(
- input_transformation_fn(tensor_dict=tensor_dict))
+ def add_five_to_image_data_augmentation_fn(tensor_dict):
+ tensor_dict[fields.InputDataFields.image] += 5
+ return tensor_dict
- self.assertAllEqual(augmented_tensor_dict[fields.InputDataFields.image],
- (np_image + 5) * 2)
+ num_classes = 4
+ input_transformation_fn = functools.partial(
+ inputs.transform_input_data,
+ model_preprocess_fn=mul_two_model_preprocessor_fn,
+ image_resizer_fn=_fake_image_resizer_fn,
+ num_classes=num_classes,
+ data_augmentation_fn=add_five_to_image_data_augmentation_fn)
+ transformed_inputs = input_transformation_fn(tensor_dict)
+ return transformed_inputs[fields.InputDataFields.image]
+ image = self.execute_cpu(graph_fn, [np_image])
+ self.assertAllEqual(image, (np_image + 5) * 2)
def test_resize_with_padding(self):
+ def graph_fn():
+ tensor_dict = {
+ fields.InputDataFields.image:
+ tf.constant(np.random.rand(100, 50, 3).astype(np.float32)),
+ fields.InputDataFields.groundtruth_boxes:
+ tf.constant(np.array([[.5, .5, 1, 1], [.0, .0, .5, .5]],
+ np.float32)),
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant(np.array([1, 2], np.int32)),
+ fields.InputDataFields.groundtruth_keypoints:
+ tf.constant([[[0.1, 0.2]], [[0.3, 0.4]]]),
+ }
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(np.random.rand(100, 50, 3).astype(np.float32)),
- fields.InputDataFields.groundtruth_boxes:
- tf.constant(np.array([[.5, .5, 1, 1], [.0, .0, .5, .5]],
- np.float32)),
- fields.InputDataFields.groundtruth_classes:
- tf.constant(np.array([1, 2], np.int32)),
- fields.InputDataFields.groundtruth_keypoints:
- tf.constant([[[0.1, 0.2]], [[0.3, 0.4]]]),
- }
-
- num_classes = 3
- input_transformation_fn = functools.partial(
- inputs.transform_input_data,
- model_preprocess_fn=_fake_resize50_preprocess_fn,
- image_resizer_fn=_fake_image_resizer_fn,
- num_classes=num_classes,)
-
- with self.test_session() as sess:
- transformed_inputs = sess.run(
- input_transformation_fn(tensor_dict=tensor_dict))
+ num_classes = 3
+ input_transformation_fn = functools.partial(
+ inputs.transform_input_data,
+ model_preprocess_fn=_fake_resize50_preprocess_fn,
+ image_resizer_fn=_fake_image_resizer_fn,
+ num_classes=num_classes,)
+ transformed_inputs = input_transformation_fn(tensor_dict)
+ return (transformed_inputs[fields.InputDataFields.groundtruth_boxes],
+ transformed_inputs[fields.InputDataFields.groundtruth_keypoints])
+ groundtruth_boxes, groundtruth_keypoints = self.execute_cpu(graph_fn, [])
self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_boxes],
+ groundtruth_boxes,
[[.5, .25, 1., .5], [.0, .0, .5, .25]])
self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_keypoints],
+ groundtruth_keypoints,
[[[.1, .1]], [[.3, .2]]])
def test_groundtruth_keypoint_weights(self):
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(np.random.rand(100, 50, 3).astype(np.float32)),
- fields.InputDataFields.groundtruth_boxes:
- tf.constant(np.array([[.5, .5, 1, 1], [.0, .0, .5, .5]],
- np.float32)),
- fields.InputDataFields.groundtruth_classes:
- tf.constant(np.array([1, 2], np.int32)),
- fields.InputDataFields.groundtruth_keypoints:
- tf.constant([[[0.1, 0.2], [0.3, 0.4]],
- [[0.5, 0.6], [0.7, 0.8]]]),
- fields.InputDataFields.groundtruth_keypoint_visibilities:
- tf.constant([[True, False], [True, True]]),
- }
-
- num_classes = 3
- keypoint_type_weight = [1.0, 2.0]
- input_transformation_fn = functools.partial(
- inputs.transform_input_data,
- model_preprocess_fn=_fake_resize50_preprocess_fn,
- image_resizer_fn=_fake_image_resizer_fn,
- num_classes=num_classes,
- keypoint_type_weight=keypoint_type_weight)
+ def graph_fn():
+ tensor_dict = {
+ fields.InputDataFields.image:
+ tf.constant(np.random.rand(100, 50, 3).astype(np.float32)),
+ fields.InputDataFields.groundtruth_boxes:
+ tf.constant(np.array([[.5, .5, 1, 1], [.0, .0, .5, .5]],
+ np.float32)),
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant(np.array([1, 2], np.int32)),
+ fields.InputDataFields.groundtruth_keypoints:
+ tf.constant([[[0.1, 0.2], [0.3, 0.4]],
+ [[0.5, 0.6], [0.7, 0.8]]]),
+ fields.InputDataFields.groundtruth_keypoint_visibilities:
+ tf.constant([[True, False], [True, True]]),
+ }
- with self.test_session() as sess:
- transformed_inputs = sess.run(
- input_transformation_fn(tensor_dict=tensor_dict))
+ num_classes = 3
+ keypoint_type_weight = [1.0, 2.0]
+ input_transformation_fn = functools.partial(
+ inputs.transform_input_data,
+ model_preprocess_fn=_fake_resize50_preprocess_fn,
+ image_resizer_fn=_fake_image_resizer_fn,
+ num_classes=num_classes,
+ keypoint_type_weight=keypoint_type_weight)
+ transformed_inputs = input_transformation_fn(tensor_dict=tensor_dict)
+ return (transformed_inputs[fields.InputDataFields.groundtruth_keypoints],
+ transformed_inputs[fields.InputDataFields.
+ groundtruth_keypoint_weights])
+
+ groundtruth_keypoints, groundtruth_keypoint_weights = self.execute_cpu(
+ graph_fn, [])
self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_keypoints],
+ groundtruth_keypoints,
[[[0.1, 0.1], [0.3, 0.2]],
[[0.5, 0.3], [0.7, 0.4]]])
self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_keypoint_weights],
+ groundtruth_keypoint_weights,
[[1.0, 0.0], [1.0, 2.0]])
def test_groundtruth_keypoint_weights_default(self):
- tensor_dict = {
- fields.InputDataFields.image:
- tf.constant(np.random.rand(100, 50, 3).astype(np.float32)),
- fields.InputDataFields.groundtruth_boxes:
- tf.constant(np.array([[.5, .5, 1, 1], [.0, .0, .5, .5]],
- np.float32)),
- fields.InputDataFields.groundtruth_classes:
- tf.constant(np.array([1, 2], np.int32)),
- fields.InputDataFields.groundtruth_keypoints:
- tf.constant([[[0.1, 0.2], [0.3, 0.4]],
- [[0.5, 0.6], [0.7, 0.8]]]),
- }
-
- num_classes = 3
- input_transformation_fn = functools.partial(
- inputs.transform_input_data,
- model_preprocess_fn=_fake_resize50_preprocess_fn,
- image_resizer_fn=_fake_image_resizer_fn,
- num_classes=num_classes)
+ def graph_fn():
+ tensor_dict = {
+ fields.InputDataFields.image:
+ tf.constant(np.random.rand(100, 50, 3).astype(np.float32)),
+ fields.InputDataFields.groundtruth_boxes:
+ tf.constant(np.array([[.5, .5, 1, 1], [.0, .0, .5, .5]],
+ np.float32)),
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant(np.array([1, 2], np.int32)),
+ fields.InputDataFields.groundtruth_keypoints:
+ tf.constant([[[0.1, 0.2], [0.3, 0.4]],
+ [[0.5, 0.6], [0.7, 0.8]]]),
+ }
- with self.test_session() as sess:
- transformed_inputs = sess.run(
- input_transformation_fn(tensor_dict=tensor_dict))
+ num_classes = 3
+ input_transformation_fn = functools.partial(
+ inputs.transform_input_data,
+ model_preprocess_fn=_fake_resize50_preprocess_fn,
+ image_resizer_fn=_fake_image_resizer_fn,
+ num_classes=num_classes)
+ transformed_inputs = input_transformation_fn(tensor_dict=tensor_dict)
+ return (transformed_inputs[fields.InputDataFields.groundtruth_keypoints],
+ transformed_inputs[fields.InputDataFields.
+ groundtruth_keypoint_weights])
+ groundtruth_keypoints, groundtruth_keypoint_weights = self.execute_cpu(
+ graph_fn, [])
self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_keypoints],
+ groundtruth_keypoints,
[[[0.1, 0.1], [0.3, 0.2]],
[[0.5, 0.3], [0.7, 0.4]]])
self.assertAllClose(
- transformed_inputs[fields.InputDataFields.groundtruth_keypoint_weights],
+ groundtruth_keypoint_weights,
[[1.0, 1.0], [1.0, 1.0]])
@@ -1303,15 +1299,15 @@ class PadInputDataToStaticShapesFnTest(test_case.TestCase):
def test_pad_images_boxes_and_classes(self):
input_tensor_dict = {
fields.InputDataFields.image:
- tf.placeholder(tf.float32, [None, None, 3]),
+ tf.random.uniform([3, 3, 3]),
fields.InputDataFields.groundtruth_boxes:
- tf.placeholder(tf.float32, [None, 4]),
+ tf.random.uniform([2, 4]),
fields.InputDataFields.groundtruth_classes:
- tf.placeholder(tf.int32, [None, 3]),
+ tf.random.uniform([2, 3], minval=0, maxval=2, dtype=tf.int32),
fields.InputDataFields.true_image_shape:
- tf.placeholder(tf.int32, [3]),
+ tf.constant([3, 3, 3]),
fields.InputDataFields.original_image_spatial_shape:
- tf.placeholder(tf.int32, [2])
+ tf.constant([3, 3])
}
padded_tensor_dict = inputs.pad_input_data_to_static_shapes(
tensor_dict=input_tensor_dict,
@@ -1336,69 +1332,35 @@ class PadInputDataToStaticShapesFnTest(test_case.TestCase):
.shape.as_list(), [3, 3])
def test_clip_boxes_and_classes(self):
- input_tensor_dict = {
- fields.InputDataFields.groundtruth_boxes:
- tf.placeholder(tf.float32, [None, 4]),
- fields.InputDataFields.groundtruth_classes:
- tf.placeholder(tf.int32, [None, 3]),
- fields.InputDataFields.num_groundtruth_boxes:
- tf.placeholder(tf.int32, [])
- }
- padded_tensor_dict = inputs.pad_input_data_to_static_shapes(
- tensor_dict=input_tensor_dict,
- max_num_boxes=3,
- num_classes=3,
- spatial_image_shape=[5, 6])
-
- self.assertAllEqual(
- padded_tensor_dict[fields.InputDataFields.groundtruth_boxes]
- .shape.as_list(), [3, 4])
- self.assertAllEqual(
- padded_tensor_dict[fields.InputDataFields.groundtruth_classes]
- .shape.as_list(), [3, 3])
-
- with self.test_session() as sess:
- out_tensor_dict = sess.run(
- padded_tensor_dict,
- feed_dict={
- input_tensor_dict[fields.InputDataFields.groundtruth_boxes]:
- np.random.rand(5, 4),
- input_tensor_dict[fields.InputDataFields.groundtruth_classes]:
- np.random.rand(2, 3),
- input_tensor_dict[fields.InputDataFields.num_groundtruth_boxes]:
- 5,
- })
-
- self.assertAllEqual(
- out_tensor_dict[fields.InputDataFields.groundtruth_boxes].shape, [3, 4])
- self.assertAllEqual(
- out_tensor_dict[fields.InputDataFields.groundtruth_classes].shape,
- [3, 3])
- self.assertEqual(
- out_tensor_dict[fields.InputDataFields.num_groundtruth_boxes],
- 3)
-
- def test_do_not_pad_dynamic_images(self):
- input_tensor_dict = {
- fields.InputDataFields.image:
- tf.placeholder(tf.float32, [None, None, 3]),
- }
- padded_tensor_dict = inputs.pad_input_data_to_static_shapes(
- tensor_dict=input_tensor_dict,
- max_num_boxes=3,
- num_classes=3,
- spatial_image_shape=[None, None])
-
- self.assertAllEqual(
- padded_tensor_dict[fields.InputDataFields.image].shape.as_list(),
- [None, None, 3])
+ def graph_fn():
+ input_tensor_dict = {
+ fields.InputDataFields.groundtruth_boxes:
+ tf.random.uniform([5, 4]),
+ fields.InputDataFields.groundtruth_classes:
+ tf.random.uniform([2, 3], maxval=10, dtype=tf.int32),
+ fields.InputDataFields.num_groundtruth_boxes:
+ tf.constant(5)
+ }
+ padded_tensor_dict = inputs.pad_input_data_to_static_shapes(
+ tensor_dict=input_tensor_dict,
+ max_num_boxes=3,
+ num_classes=3,
+ spatial_image_shape=[5, 6])
+ return (padded_tensor_dict[fields.InputDataFields.groundtruth_boxes],
+ padded_tensor_dict[fields.InputDataFields.groundtruth_classes],
+ padded_tensor_dict[fields.InputDataFields.num_groundtruth_boxes])
+ (groundtruth_boxes, groundtruth_classes,
+ num_groundtruth_boxes) = self.execute_cpu(graph_fn, [])
+ self.assertAllEqual(groundtruth_boxes.shape, [3, 4])
+ self.assertAllEqual(groundtruth_classes.shape, [3, 3])
+ self.assertEqual(num_groundtruth_boxes, 3)
def test_images_and_additional_channels(self):
input_tensor_dict = {
fields.InputDataFields.image:
- tf.placeholder(tf.float32, [None, None, 5]),
+ test_utils.image_with_dynamic_shape(4, 3, 5),
fields.InputDataFields.image_additional_channels:
- tf.placeholder(tf.float32, [None, None, 2]),
+ test_utils.image_with_dynamic_shape(4, 3, 2),
}
padded_tensor_dict = inputs.pad_input_data_to_static_shapes(
tensor_dict=input_tensor_dict,
@@ -1418,11 +1380,11 @@ class PadInputDataToStaticShapesFnTest(test_case.TestCase):
def test_images_and_additional_channels_errors(self):
input_tensor_dict = {
fields.InputDataFields.image:
- tf.placeholder(tf.float32, [None, None, 3]),
+ test_utils.image_with_dynamic_shape(10, 10, 3),
fields.InputDataFields.image_additional_channels:
- tf.placeholder(tf.float32, [None, None, 2]),
+ test_utils.image_with_dynamic_shape(10, 10, 2),
fields.InputDataFields.original_image:
- tf.placeholder(tf.float32, [None, None, 3]),
+ test_utils.image_with_dynamic_shape(10, 10, 3),
}
with self.assertRaises(ValueError):
_ = inputs.pad_input_data_to_static_shapes(
@@ -1434,7 +1396,7 @@ class PadInputDataToStaticShapesFnTest(test_case.TestCase):
def test_gray_images(self):
input_tensor_dict = {
fields.InputDataFields.image:
- tf.placeholder(tf.float32, [None, None, 1]),
+ test_utils.image_with_dynamic_shape(4, 4, 1),
}
padded_tensor_dict = inputs.pad_input_data_to_static_shapes(
tensor_dict=input_tensor_dict,
@@ -1449,9 +1411,9 @@ class PadInputDataToStaticShapesFnTest(test_case.TestCase):
def test_gray_images_and_additional_channels(self):
input_tensor_dict = {
fields.InputDataFields.image:
- tf.placeholder(tf.float32, [None, None, 3]),
+ test_utils.image_with_dynamic_shape(4, 4, 3),
fields.InputDataFields.image_additional_channels:
- tf.placeholder(tf.float32, [None, None, 2]),
+ test_utils.image_with_dynamic_shape(4, 4, 2),
}
# pad_input_data_to_static_shape assumes that image is already concatenated
# with additional channels.
@@ -1469,11 +1431,14 @@ class PadInputDataToStaticShapesFnTest(test_case.TestCase):
.shape.as_list(), [5, 6, 2])
def test_keypoints(self):
+ keypoints = test_utils.keypoints_with_dynamic_shape(10, 16, 4)
+    visibilities = tf.cast(
+        tf.random.uniform(
+            tf.shape(keypoints)[:-1], minval=0, maxval=2, dtype=tf.int32),
+        tf.bool)
input_tensor_dict = {
fields.InputDataFields.groundtruth_keypoints:
- tf.placeholder(tf.float32, [None, 16, 4]),
+            keypoints,
fields.InputDataFields.groundtruth_keypoint_visibilities:
- tf.placeholder(tf.bool, [None, 16]),
+ visibilities
}
padded_tensor_dict = inputs.pad_input_data_to_static_shapes(
tensor_dict=input_tensor_dict,
@@ -1493,39 +1458,76 @@ class PadInputDataToStaticShapesFnTest(test_case.TestCase):
context_memory_size = 8
context_feature_length = 10
max_num_context_features = 20
- input_tensor_dict = {
- fields.InputDataFields.context_features:
- tf.placeholder(tf.float32,
- [context_memory_size, context_feature_length]),
- fields.InputDataFields.context_feature_length:
- tf.placeholder(tf.float32, [])
- }
- padded_tensor_dict = inputs.pad_input_data_to_static_shapes(
- tensor_dict=input_tensor_dict,
- max_num_boxes=3,
- num_classes=3,
- spatial_image_shape=[5, 6],
- max_num_context_features=max_num_context_features,
- context_feature_length=context_feature_length)
+ def graph_fn():
+ input_tensor_dict = {
+ fields.InputDataFields.context_features:
+ tf.ones([context_memory_size, context_feature_length]),
+ fields.InputDataFields.context_feature_length:
+ tf.constant(context_feature_length)
+ }
+ padded_tensor_dict = inputs.pad_input_data_to_static_shapes(
+ tensor_dict=input_tensor_dict,
+ max_num_boxes=3,
+ num_classes=3,
+ spatial_image_shape=[5, 6],
+ max_num_context_features=max_num_context_features,
+ context_feature_length=context_feature_length)
- self.assertAllEqual(
- padded_tensor_dict[
- fields.InputDataFields.context_features].shape.as_list(),
- [max_num_context_features, context_feature_length])
+ self.assertAllEqual(
+ padded_tensor_dict[
+ fields.InputDataFields.context_features].shape.as_list(),
+ [max_num_context_features, context_feature_length])
+ return padded_tensor_dict[fields.InputDataFields.valid_context_size]
- with self.test_session() as sess:
- feed_dict = {
- input_tensor_dict[fields.InputDataFields.context_features]:
- np.ones([context_memory_size, context_feature_length],
- dtype=np.float32),
- input_tensor_dict[fields.InputDataFields.context_feature_length]:
- context_feature_length
+ valid_context_size = self.execute_cpu(graph_fn, [])
+ self.assertEqual(valid_context_size, context_memory_size)
+
+
+class NegativeSizeTest(test_case.TestCase):
+ """Test for inputs and related funcitons."""
+
+ def test_negative_size_error(self):
+ """Test that error is raised for negative size boxes."""
+
+ def graph_fn():
+ tensors = {
+ fields.InputDataFields.image: tf.zeros((128, 128, 3)),
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant([1, 1], tf.int32),
+ fields.InputDataFields.groundtruth_boxes:
+ tf.constant([[0.5, 0.5, 0.4, 0.5]], tf.float32)
}
- padded_tensor_dict_out = sess.run(padded_tensor_dict, feed_dict=feed_dict)
+ tensors = inputs.transform_input_data(
+ tensors, _fake_model_preprocessor_fn, _fake_image_resizer_fn,
+ num_classes=10)
+ return tensors[fields.InputDataFields.groundtruth_boxes]
+ with self.assertRaises(tf.errors.InvalidArgumentError):
+ self.execute_cpu(graph_fn, [])
+
+ def test_negative_size_no_assert(self):
+ """Test that negative size boxes are filtered out without assert.
+
+ This test simulates the behaviour when we run on TPU and Assert ops are
+ not supported.
+ """
- self.assertEqual(
- padded_tensor_dict_out[fields.InputDataFields.valid_context_size],
- context_memory_size)
+ tensors = {
+ fields.InputDataFields.image: tf.zeros((128, 128, 3)),
+ fields.InputDataFields.groundtruth_classes:
+ tf.constant([1, 1], tf.int32),
+ fields.InputDataFields.groundtruth_boxes:
+ tf.constant([[0.5, 0.5, 0.4, 0.5], [0.5, 0.5, 0.6, 0.6]],
+ tf.float32)
+ }
+
+ with mock.patch.object(tf, 'Assert') as tf_assert:
+ tf_assert.return_value = tf.no_op()
+ tensors = inputs.transform_input_data(
+ tensors, _fake_model_preprocessor_fn, _fake_image_resizer_fn,
+ num_classes=10)
+
+ self.assertAllClose(tensors[fields.InputDataFields.groundtruth_boxes],
+ [[0.5, 0.5, 0.6, 0.6]])
if __name__ == '__main__':
diff --git a/research/object_detection/legacy/trainer_test.py b/research/object_detection/legacy/trainer_tf1_test.py
similarity index 97%
rename from research/object_detection/legacy/trainer_test.py
rename to research/object_detection/legacy/trainer_tf1_test.py
index 3a5d073048933e98278f423f348c77b3cc2860ae..0cde654e6a8bba2cfedea939e67d44698f882e04 100644
--- a/research/object_detection/legacy/trainer_test.py
+++ b/research/object_detection/legacy/trainer_tf1_test.py
@@ -14,7 +14,7 @@
# ==============================================================================
"""Tests for object_detection.trainer."""
-
+import unittest
import tensorflow.compat.v1 as tf
import tf_slim as slim
from google.protobuf import text_format
@@ -24,6 +24,7 @@ from object_detection.core import model
from object_detection.core import standard_fields as fields
from object_detection.legacy import trainer
from object_detection.protos import train_pb2
+from object_detection.utils import tf_version
NUMBER_OF_CLASSES = 2
@@ -184,6 +185,9 @@ class FakeDetectionModel(model.DetectionModel):
"""
return {var.op.name: var for var in tf.global_variables()}
+ def restore_from_objects(self, fine_tune_checkpoint_type):
+ pass
+
def updates(self):
"""Returns a list of update operators for this model.
@@ -197,6 +201,7 @@ class FakeDetectionModel(model.DetectionModel):
pass
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class TrainerTest(tf.test.TestCase):
def test_configure_trainer_and_train_two_steps(self):
diff --git a/research/object_detection/matchers/bipartite_matcher_test.py b/research/object_detection/matchers/bipartite_matcher_tf1_test.py
similarity index 94%
rename from research/object_detection/matchers/bipartite_matcher_test.py
rename to research/object_detection/matchers/bipartite_matcher_tf1_test.py
index 1617cbbc3876f5aa1da90918557d68ecaa25360a..314546ad4ee507d3024746044d4d4a30bc92e85d 100644
--- a/research/object_detection/matchers/bipartite_matcher_test.py
+++ b/research/object_detection/matchers/bipartite_matcher_tf1_test.py
@@ -14,14 +14,18 @@
# ==============================================================================
"""Tests for object_detection.core.bipartite_matcher."""
-
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
-from object_detection.matchers import bipartite_matcher
from object_detection.utils import test_case
+from object_detection.utils import tf_version
+
+if tf_version.is_tf1():
+ from object_detection.matchers import bipartite_matcher # pylint: disable=g-import-not-at-top
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class GreedyBipartiteMatcherTest(test_case.TestCase):
def test_get_expected_matches_when_all_rows_are_valid(self):
diff --git a/research/object_detection/meta_architectures/center_net_meta_arch.py b/research/object_detection/meta_architectures/center_net_meta_arch.py
new file mode 100644
index 0000000000000000000000000000000000000000..6d4f9a2ba9737c5dd86fe22226954ddfbb16c959
--- /dev/null
+++ b/research/object_detection/meta_architectures/center_net_meta_arch.py
@@ -0,0 +1,2379 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""The CenterNet meta architecture as described in the "Objects as Points" paper [1].
+
+[1]: https://arxiv.org/abs/1904.07850
+
+"""
+
+import abc
+import collections
+import functools
+import numpy as np
+import tensorflow.compat.v1 as tf
+import tensorflow.compat.v2 as tf2
+
+from object_detection.core import box_list
+from object_detection.core import box_list_ops
+from object_detection.core import keypoint_ops
+from object_detection.core import model
+from object_detection.core import standard_fields as fields
+from object_detection.core import target_assigner as cn_assigner
+from object_detection.utils import shape_utils
+
+# Number of channels needed to predict size and offsets.
+NUM_OFFSET_CHANNELS = 2
+NUM_SIZE_CHANNELS = 2
+
+# Error range for detecting peaks.
+PEAK_EPSILON = 1e-6
+
+# Constants shared between all keypoint tasks.
+UNMATCHED_KEYPOINT_SCORE = 0.1
+KEYPOINT_CANDIDATE_SEARCH_SCALE = 0.3
+
+
+class CenterNetFeatureExtractor(tf.keras.Model):
+ """Base class for feature extractors for the CenterNet meta architecture.
+
+ Child classes are expected to override the _output_model property which will
+ return 1 or more tensors predicted by the feature extractor.
+
+ """
+ __metaclass__ = abc.ABCMeta
+
+ def __init__(self, name=None, channel_means=(0., 0., 0.),
+ channel_stds=(1., 1., 1.), bgr_ordering=False):
+ """Initializes a CenterNet feature extractor.
+
+ Args:
+ name: str, the name used for the underlying keras model.
+ channel_means: A tuple of floats, denoting the mean of each channel
+ which will be subtracted from it. If None or empty, we use 0s.
+ channel_stds: A tuple of floats, denoting the standard deviation of each
+ channel. Each channel will be divided by its standard deviation value.
+ If None or empty, we use 1s.
+ bgr_ordering: bool, if set will change the channel ordering to be in the
+      [blue, green, red] order.
+ """
+ super(CenterNetFeatureExtractor, self).__init__(name=name)
+
+ if channel_means is None or len(channel_means) == 0: # pylint:disable=g-explicit-length-test
+ channel_means = [0., 0., 0.]
+
+ if channel_stds is None or len(channel_stds) == 0: # pylint:disable=g-explicit-length-test
+ channel_stds = [1., 1., 1.]
+
+ self._channel_means = channel_means
+ self._channel_stds = channel_stds
+ self._bgr_ordering = bgr_ordering
+
+ def preprocess(self, inputs):
+ """Converts a batch of unscaled images to a scale suitable for the model.
+
+ This method normalizes the image using the given `channel_means` and
+    `channel_stds` values at initialization time while optionally flipping
+ the channel order if `bgr_ordering` is set.
+
+ Args:
+ inputs: a [batch, height, width, channels] float32 tensor
+
+ Returns:
+ outputs: a [batch, height, width, channels] float32 tensor
+
+ """
+
+ if self._bgr_ordering:
+ red, green, blue = tf.unstack(inputs, axis=3)
+ inputs = tf.stack([blue, green, red], axis=3)
+
+ channel_means = tf.reshape(tf.constant(self._channel_means),
+ [1, 1, 1, -1])
+ channel_stds = tf.reshape(tf.constant(self._channel_stds),
+ [1, 1, 1, -1])
+
+ return (inputs - channel_means)/channel_stds
+
+ @property
+ @abc.abstractmethod
+ def out_stride(self):
+ """The stride in the output image of the network."""
+ pass
+
+ @property
+ @abc.abstractmethod
+ def num_feature_outputs(self):
+ """Ther number of feature outputs returned by the feature extractor."""
+ pass
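+
+# Illustrative note (not part of the upstream file): `preprocess` simply
+# computes (inputs - channel_means) / channel_stds after the optional BGR
+# reordering. With the assumed values channel_means=(127.5, 127.5, 127.5) and
+# channel_stds=(127.5, 127.5, 127.5), a pixel value of 255. maps to 1.0 and a
+# pixel value of 0. maps to -1.0.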
+
+
+def make_prediction_net(num_out_channels, kernel_size=3, num_filters=256,
+ bias_fill=None):
+ """Creates a network to predict the given number of output channels.
+
+ This function is intended to make the prediction heads for the CenterNet
+ meta architecture.
+
+ Args:
+ num_out_channels: Number of output channels.
+ kernel_size: The size of the conv kernel in the intermediate layer
+ num_filters: The number of filters in the intermediate conv layer.
+ bias_fill: If not None, is used to initialize the bias in the final conv
+ layer.
+
+ Returns:
+ net: A keras module which when called on an input tensor of size
+ [batch_size, height, width, num_in_channels] returns an output
+ of size [batch_size, height, width, num_out_channels]
+ """
+
+ out_conv = tf.keras.layers.Conv2D(num_out_channels, kernel_size=1)
+
+ if bias_fill is not None:
+ out_conv.bias_initializer = tf.keras.initializers.constant(bias_fill)
+
+ net = tf.keras.Sequential(
+ [tf.keras.layers.Conv2D(num_filters, kernel_size=kernel_size,
+ padding='same'),
+ tf.keras.layers.ReLU(),
+ out_conv]
+ )
+
+ return net
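+
+# Usage sketch (not part of the upstream file; the shapes and bias value below
+# are illustrative assumptions only):
+#
+#   heatmap_head = make_prediction_net(num_out_channels=90, bias_fill=-2.19)
+#   features = tf.zeros([2, 128, 128, 64])
+#   heatmaps = heatmap_head(features)  # shape [2, 128, 128, 90]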
+
+
+def _to_float32(x):
+ return tf.cast(x, tf.float32)
+
+
+def _get_shape(tensor, num_dims):
+ tf.Assert(tensor.get_shape().ndims == num_dims, [tensor])
+ return shape_utils.combined_static_and_dynamic_shape(tensor)
+
+
+def _flatten_spatial_dimensions(batch_images):
+ batch_size, height, width, channels = _get_shape(batch_images, 4)
+ return tf.reshape(batch_images, [batch_size, height * width,
+ channels])
+
+
+def top_k_feature_map_locations(feature_map, max_pool_kernel_size=3, k=100,
+ per_channel=False):
+ """Returns the top k scores and their locations in a feature map.
+
+ Given a feature map, the top k values (based on activation) are returned. If
+ `per_channel` is True, the top k values **per channel** are returned.
+
+ The `max_pool_kernel_size` argument allows for selecting local peaks in a
+ region. This filtering is done per channel, so nothing prevents two values at
+    the same location from being returned.
+
+ Args:
+ feature_map: [batch, height, width, channels] float32 feature map.
+ max_pool_kernel_size: integer, the max pool kernel size to use to pull off
+ peak score locations in a neighborhood (independently for each channel).
+ For example, to make sure no two neighboring values (in the same channel)
+ are returned, set max_pool_kernel_size=3. If None or 1, will not apply max
+ pooling.
+ k: The number of highest scoring locations to return.
+ per_channel: If True, will return the top k scores and locations per
+ feature map channel. If False, the top k across the entire feature map
+ (height x width x channels) are returned.
+
+ Returns:
+ Tuple of
+ scores: A [batch, N] float32 tensor with scores from the feature map in
+ descending order. If per_channel is False, N = k. Otherwise,
+ N = k * channels, and the first k elements correspond to channel 0, the
+ second k correspond to channel 1, etc.
+ y_indices: A [batch, N] int tensor with y indices of the top k feature map
+ locations. If per_channel is False, N = k. Otherwise,
+ N = k * channels.
+ x_indices: A [batch, N] int tensor with x indices of the top k feature map
+ locations. If per_channel is False, N = k. Otherwise,
+ N = k * channels.
+ channel_indices: A [batch, N] int tensor with channel indices of the top k
+ feature map locations. If per_channel is False, N = k. Otherwise,
+ N = k * channels.
+ """
+ if not max_pool_kernel_size or max_pool_kernel_size == 1:
+ feature_map_peaks = feature_map
+ else:
+ feature_map_max_pool = tf.nn.max_pool(
+ feature_map, ksize=max_pool_kernel_size, strides=1, padding='SAME')
+
+ feature_map_peak_mask = tf.math.abs(
+ feature_map - feature_map_max_pool) < PEAK_EPSILON
+
+ # Zero out everything that is not a peak.
+ feature_map_peaks = (
+ feature_map * _to_float32(feature_map_peak_mask))
+
+ batch_size, _, width, num_channels = _get_shape(feature_map, 4)
+
+ if per_channel:
+ # Perform top k over batch and channels.
+ feature_map_peaks_transposed = tf.transpose(feature_map_peaks,
+ perm=[0, 3, 1, 2])
+ feature_map_peaks_transposed = tf.reshape(
+ feature_map_peaks_transposed, [batch_size, num_channels, -1])
+ scores, peak_flat_indices = tf.math.top_k(feature_map_peaks_transposed, k=k)
+ # Convert the indices such that they represent the location in the full
+ # (flattened) feature map of size [batch, height * width * channels].
+ channel_idx = tf.range(num_channels)[tf.newaxis, :, tf.newaxis]
+ peak_flat_indices = num_channels * peak_flat_indices + channel_idx
+ scores = tf.reshape(scores, [batch_size, -1])
+ peak_flat_indices = tf.reshape(peak_flat_indices, [batch_size, -1])
+ else:
+ feature_map_peaks_flat = tf.reshape(feature_map_peaks, [batch_size, -1])
+ scores, peak_flat_indices = tf.math.top_k(feature_map_peaks_flat, k=k)
+
+ # Get x, y and channel indices corresponding to the top indices in the flat
+ # array.
+ y_indices, x_indices, channel_indices = (
+ row_col_channel_indices_from_flattened_indices(
+ peak_flat_indices, width, num_channels))
+ return scores, y_indices, x_indices, channel_indices
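+
+# Usage sketch (not part of the upstream file; shapes are illustrative):
+#
+#   feature_map = tf.random.uniform([1, 32, 32, 5])
+#   scores, y_inds, x_inds, channel_inds = top_k_feature_map_locations(
+#       feature_map, max_pool_kernel_size=3, k=10)
+#   # Each output has shape [1, 10]. With per_channel=True, each would instead
+#   # have shape [1, 10 * 5] (the top 10 locations per channel).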
+
+
+def prediction_tensors_to_boxes(detection_scores, y_indices, x_indices,
+ channel_indices, height_width_predictions,
+ offset_predictions):
+ """Converts CenterNet class-center, offset and size predictions to boxes.
+
+ Args:
+ detection_scores: A [batch, num_boxes] float32 tensor with detection
+ scores in range [0, 1].
+ y_indices: A [batch, num_boxes] int32 tensor with y indices corresponding to
+ object center locations (expressed in output coordinate frame).
+ x_indices: A [batch, num_boxes] int32 tensor with x indices corresponding to
+ object center locations (expressed in output coordinate frame).
+ channel_indices: A [batch, num_boxes] int32 tensor with channel indices
+ corresponding to object classes.
+ height_width_predictions: A float tensor of shape [batch_size, height,
+ width, 2] representing the height and width of a box centered at each
+ pixel.
+ offset_predictions: A float tensor of shape [batch_size, height, width, 2]
+ representing the y and x offsets of a box centered at each pixel. This
+ helps reduce the error from downsampling.
+
+ Returns:
+ detection_boxes: A tensor of shape [batch_size, num_boxes, 4] holding the
+      raw bounding box coordinates of boxes.
+ detection_classes: An integer tensor of shape [batch_size, num_boxes]
+ indicating the predicted class for each box.
+ detection_scores: A float tensor of shape [batch_size, num_boxes] indicating
+ the score for each box.
+ num_detections: An integer tensor of shape [batch_size,] indicating the
+ number of boxes detected for each sample in the batch.
+
+ """
+ _, _, width, _ = _get_shape(height_width_predictions, 4)
+
+ peak_spatial_indices = flattened_indices_from_row_col_indices(
+ y_indices, x_indices, width)
+ y_indices = _to_float32(y_indices)
+ x_indices = _to_float32(x_indices)
+
+ height_width_flat = _flatten_spatial_dimensions(height_width_predictions)
+ offsets_flat = _flatten_spatial_dimensions(offset_predictions)
+
+ height_width = tf.gather(height_width_flat, peak_spatial_indices,
+ batch_dims=1)
+ offsets = tf.gather(offsets_flat, peak_spatial_indices, batch_dims=1)
+
+ heights, widths = tf.unstack(height_width, axis=2)
+ y_offsets, x_offsets = tf.unstack(offsets, axis=2)
+
+ detection_classes = channel_indices
+
+ num_detections = tf.reduce_sum(tf.to_int32(detection_scores > 0), axis=1)
+
+ boxes = tf.stack([y_indices + y_offsets - heights / 2.0,
+ x_indices + x_offsets - widths / 2.0,
+ y_indices + y_offsets + heights / 2.0,
+ x_indices + x_offsets + widths / 2.0], axis=2)
+
+ return boxes, detection_classes, detection_scores, num_detections
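+
+# Worked example (not part of the upstream file; numbers are illustrative): a
+# peak at (y, x) = (10, 20) with predicted offsets (0.3, -0.2) and predicted
+# height/width (8, 6) yields the box
+#   [10.3 - 4, 19.8 - 3, 10.3 + 4, 19.8 + 3] = [6.3, 16.8, 14.3, 22.8],
+# expressed in the output (stride-reduced) coordinate frame.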
+
+
+def prediction_tensors_to_keypoint_candidates(
+ keypoint_heatmap_predictions,
+ keypoint_heatmap_offsets,
+ keypoint_score_threshold=0.1,
+ max_pool_kernel_size=1,
+ max_candidates=20):
+ """Convert keypoint heatmap predictions and offsets to keypoint candidates.
+
+ Args:
+ keypoint_heatmap_predictions: A float tensor of shape [batch_size, height,
+ width, num_keypoints] representing the per-keypoint heatmaps.
+ keypoint_heatmap_offsets: A float tensor of shape [batch_size, height,
+ width, 2] (or [batch_size, height, width, 2 * num_keypoints] if
+ 'per_keypoint_offset' is set True) representing the per-keypoint offsets.
+ keypoint_score_threshold: float, the threshold for considering a keypoint
+ a candidate.
+ max_pool_kernel_size: integer, the max pool kernel size to use to pull off
+ peak score locations in a neighborhood. For example, to make sure no two
+ neighboring values for the same keypoint are returned, set
+ max_pool_kernel_size=3. If None or 1, will not apply any local filtering.
+ max_candidates: integer, maximum number of keypoint candidates per
+ keypoint type.
+
+ Returns:
+ keypoint_candidates: A tensor of shape
+ [batch_size, max_candidates, num_keypoints, 2] holding the
+ location of keypoint candidates in [y, x] format (expressed in absolute
+ coordinates in the output coordinate frame).
+ keypoint_scores: A float tensor of shape
+ [batch_size, max_candidates, num_keypoints] with the scores for each
+ keypoint candidate. The scores come directly from the heatmap predictions.
+ num_keypoint_candidates: An integer tensor of shape
+ [batch_size, num_keypoints] with the number of candidates for each
+ keypoint type, as it's possible to filter some candidates due to the score
+ threshold.
+ """
+ batch_size, _, width, num_keypoints = _get_shape(
+ keypoint_heatmap_predictions, 4)
+ # Get x, y and channel indices corresponding to the top indices in the
+ # keypoint heatmap predictions.
+ # Note that the top k candidates are produced for **each keypoint type**.
+ # Might be worth eventually trying top k in the feature map, independent of
+ # the keypoint type.
+ keypoint_scores, y_indices, x_indices, channel_indices = (
+ top_k_feature_map_locations(keypoint_heatmap_predictions,
+ max_pool_kernel_size=max_pool_kernel_size,
+ k=max_candidates,
+ per_channel=True))
+
+ peak_spatial_indices = flattened_indices_from_row_col_indices(
+ y_indices, x_indices, width)
+ y_indices = _to_float32(y_indices)
+ x_indices = _to_float32(x_indices)
+
+ offsets_flat = _flatten_spatial_dimensions(keypoint_heatmap_offsets)
+
+ selected_offsets = tf.gather(offsets_flat, peak_spatial_indices, batch_dims=1)
+ _, num_indices, num_channels = _get_shape(selected_offsets, 3)
+ if num_channels > 2:
+ reshaped_offsets = tf.reshape(selected_offsets,
+ [batch_size, num_indices, -1, 2])
+ offsets = tf.gather(reshaped_offsets, channel_indices, batch_dims=2)
+ else:
+ offsets = selected_offsets
+ y_offsets, x_offsets = tf.unstack(offsets, axis=2)
+
+ keypoint_candidates = tf.stack([y_indices + y_offsets,
+ x_indices + x_offsets], axis=2)
+ keypoint_candidates = tf.reshape(
+ keypoint_candidates,
+ [batch_size, num_keypoints, max_candidates, 2])
+ keypoint_candidates = tf.transpose(keypoint_candidates, [0, 2, 1, 3])
+ keypoint_scores = tf.reshape(
+ keypoint_scores,
+ [batch_size, num_keypoints, max_candidates])
+ keypoint_scores = tf.transpose(keypoint_scores, [0, 2, 1])
+ num_candidates = tf.reduce_sum(
+ tf.to_int32(keypoint_scores >= keypoint_score_threshold), axis=1)
+
+ return keypoint_candidates, keypoint_scores, num_candidates
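+
+# Usage sketch (not part of the upstream file; 17 keypoints and the other
+# shapes are illustrative assumptions):
+#
+#   heatmaps = tf.random.uniform([1, 64, 64, 17])
+#   offsets = tf.zeros([1, 64, 64, 2])
+#   candidates, scores, num_candidates = (
+#       prediction_tensors_to_keypoint_candidates(
+#           heatmaps, offsets, keypoint_score_threshold=0.1,
+#           max_pool_kernel_size=3, max_candidates=20))
+#   # candidates: [1, 20, 17, 2], scores: [1, 20, 17], num_candidates: [1, 17]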
+
+
+def regressed_keypoints_at_object_centers(regressed_keypoint_predictions,
+ y_indices, x_indices):
+ """Returns the regressed keypoints at specified object centers.
+
+ The original keypoint predictions are regressed relative to each feature map
+ location. The returned keypoints are expressed in absolute coordinates in the
+ output frame (i.e. the center offsets are added to each individual regressed
+ set of keypoints).
+
+ Args:
+ regressed_keypoint_predictions: A float tensor of shape
+ [batch_size, height, width, 2 * num_keypoints] holding regressed
+ keypoints. The last dimension has keypoint coordinates ordered as follows:
+ [y0, x0, y1, x1, ..., y{J-1}, x{J-1}] where J is the number of keypoints.
+ y_indices: A [batch, num_instances] int tensor holding y indices for object
+ centers. These indices correspond to locations in the output feature map.
+ x_indices: A [batch, num_instances] int tensor holding x indices for object
+ centers. These indices correspond to locations in the output feature map.
+
+ Returns:
+ A float tensor of shape [batch_size, num_objects, 2 * num_keypoints] where
+ regressed keypoints are gathered at the provided locations, and converted
+ to absolute coordinates in the output coordinate frame.
+ """
+ batch_size, _, width, _ = _get_shape(regressed_keypoint_predictions, 4)
+ flattened_indices = flattened_indices_from_row_col_indices(
+ y_indices, x_indices, width)
+ _, num_instances = _get_shape(flattened_indices, 2)
+
+ regressed_keypoints_flat = _flatten_spatial_dimensions(
+ regressed_keypoint_predictions)
+
+ relative_regressed_keypoints = tf.gather(
+ regressed_keypoints_flat, flattened_indices, batch_dims=1)
+ relative_regressed_keypoints = tf.reshape(
+ relative_regressed_keypoints,
+ [batch_size, num_instances, -1, 2])
+ relative_regressed_keypoints_y, relative_regressed_keypoints_x = tf.unstack(
+ relative_regressed_keypoints, axis=3)
+ y_indices = _to_float32(tf.expand_dims(y_indices, axis=-1))
+ x_indices = _to_float32(tf.expand_dims(x_indices, axis=-1))
+ absolute_regressed_keypoints = tf.stack(
+ [y_indices + relative_regressed_keypoints_y,
+ x_indices + relative_regressed_keypoints_x],
+ axis=3)
+ return tf.reshape(absolute_regressed_keypoints,
+ [batch_size, num_instances, -1])
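+
+# Worked example (not part of the upstream file; numbers are illustrative): for
+# an object center at (y, x) = (4, 7) in the output frame, a regressed keypoint
+# offset of (-1.0, 2.5) yields the absolute keypoint coordinate (3.0, 9.5),
+# i.e. the center indices plus the regressed offsets.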
+
+
+def refine_keypoints(regressed_keypoints, keypoint_candidates, keypoint_scores,
+ num_keypoint_candidates, bboxes=None,
+ unmatched_keypoint_score=0.1, box_scale=1.2,
+ candidate_search_scale=0.3,
+ candidate_ranking_mode='min_distance'):
+ """Refines regressed keypoints by snapping to the nearest candidate keypoints.
+
+ The initial regressed keypoints represent a full set of keypoints regressed
+ from the centers of the objects. The keypoint candidates are estimated
+ independently from heatmaps, and are not associated with any object instances.
+ This function refines the regressed keypoints by "snapping" to the
+ nearest/highest score/highest score-distance ratio (depending on the
+ candidate_ranking_mode) candidate of the same keypoint type (e.g. "nose").
+ If no candidates are nearby, the regressed keypoint remains unchanged.
+
+ In order to snap a regressed keypoint to a candidate keypoint, the following
+ must be satisfied:
+ - the candidate keypoint must be of the same type as the regressed keypoint
+ - the candidate keypoint must not lie outside the predicted boxes (or the
+    boxes which enclose the regressed keypoints for the instance if `bboxes` is
+    not provided). Note that the box is scaled by
+    `box_scale` in height and width, to provide some margin around the
+ keypoints
+ - the distance to the closest candidate keypoint cannot exceed
+ candidate_search_scale * max(height, width), where height and width refer to
+ the bounding box for the instance.
+
+ Note that the same candidate keypoint is allowed to snap to regressed
+  keypoints in different instances.
+
+ Args:
+ regressed_keypoints: A float tensor of shape
+ [batch_size, num_instances, num_keypoints, 2] with the initial regressed
+ keypoints.
+ keypoint_candidates: A tensor of shape
+ [batch_size, max_candidates, num_keypoints, 2] holding the location of
+ keypoint candidates in [y, x] format (expressed in absolute coordinates in
+ the output coordinate frame).
+ keypoint_scores: A float tensor of shape
+ [batch_size, max_candidates, num_keypoints] indicating the scores for
+ keypoint candidates.
+ num_keypoint_candidates: An integer tensor of shape
+ [batch_size, num_keypoints] indicating the number of valid candidates for
+ each keypoint type, as there may be padding (dim 1) of
+ `keypoint_candidates` and `keypoint_scores`.
+ bboxes: A tensor of shape [batch_size, num_instances, 4] with predicted
+ bounding boxes for each instance, expressed in the output coordinate
+ frame. If not provided, boxes will be computed from regressed keypoints.
+ unmatched_keypoint_score: float, the default score to use for regressed
+ keypoints that are not successfully snapped to a nearby candidate.
+ box_scale: float, the multiplier to expand the bounding boxes (either the
+ provided boxes or those which tightly cover the regressed keypoints) for
+ an instance. This scale is typically larger than 1.0 when not providing
+ `bboxes`.
+ candidate_search_scale: float, the scale parameter that multiplies the
+ largest dimension of a bounding box. The resulting distance becomes a
+ search radius for candidates in the vicinity of each regressed keypoint.
+ candidate_ranking_mode: A string as one of ['min_distance',
+      'score_distance_ratio'] indicating how to select the candidate. If an
+      invalid value is provided, a ValueError will be raised.
+
+ Returns:
+ A tuple with:
+ refined_keypoints: A float tensor of shape
+ [batch_size, num_instances, num_keypoints, 2] with the final, refined
+ keypoints.
+ refined_scores: A float tensor of shape
+ [batch_size, num_instances, num_keypoints] with scores associated with all
+ instances and keypoints in `refined_keypoints`.
+
+ Raises:
+    ValueError: if the provided candidate_ranking_mode is not one of
+ ['min_distance', 'score_distance_ratio']
+ """
+ batch_size, num_instances, num_keypoints, _ = (
+ shape_utils.combined_static_and_dynamic_shape(regressed_keypoints))
+ max_candidates = keypoint_candidates.shape[1]
+
+ # Replace all invalid (i.e. padded) keypoint candidates with NaN.
+ # This will prevent them from being considered.
+ range_tiled = tf.tile(
+ tf.reshape(tf.range(max_candidates), [1, max_candidates, 1]),
+ [batch_size, 1, num_keypoints])
+ num_candidates_tiled = tf.tile(tf.expand_dims(num_keypoint_candidates, 1),
+ [1, max_candidates, 1])
+ invalid_candidates = range_tiled >= num_candidates_tiled
+ nan_mask = tf.where(
+ invalid_candidates,
+ np.nan * tf.ones_like(invalid_candidates, dtype=tf.float32),
+ tf.ones_like(invalid_candidates, dtype=tf.float32))
+ keypoint_candidates_with_nans = tf.math.multiply(
+ keypoint_candidates, tf.expand_dims(nan_mask, -1))
+
+ # Pairwise squared distances between regressed keypoints and candidate
+ # keypoints (for a single keypoint type).
+ # Shape [batch_size, num_instances, max_candidates, num_keypoints].
+ regressed_keypoint_expanded = tf.expand_dims(regressed_keypoints,
+ axis=2)
+ keypoint_candidates_expanded = tf.expand_dims(
+ keypoint_candidates_with_nans, axis=1)
+ sqrd_distances = tf.math.reduce_sum(
+ tf.math.squared_difference(regressed_keypoint_expanded,
+ keypoint_candidates_expanded),
+ axis=-1)
+ distances = tf.math.sqrt(sqrd_distances)
+
+ # Determine the candidates that have the minimum distance to the regressed
+ # keypoints. Shape [batch_size, num_instances, num_keypoints].
+ min_distances = tf.math.reduce_min(distances, axis=2)
+ if candidate_ranking_mode == 'min_distance':
+ nearby_candidate_inds = tf.math.argmin(distances, axis=2)
+ elif candidate_ranking_mode == 'score_distance_ratio':
+ # tiled_keypoint_scores:
+ # Shape [batch_size, num_instances, max_candidates, num_keypoints].
+ tiled_keypoint_scores = tf.tile(
+ tf.expand_dims(keypoint_scores, axis=1),
+ multiples=[1, num_instances, 1, 1])
+ ranking_scores = tiled_keypoint_scores / (distances + 1e-6)
+ nearby_candidate_inds = tf.math.argmax(ranking_scores, axis=2)
+ else:
+ raise ValueError('Not recognized candidate_ranking_mode: %s' %
+ candidate_ranking_mode)
+
+ # Gather the coordinates and scores corresponding to the closest candidates.
+ # Shape of tensors are [batch_size, num_instances, num_keypoints, 2] and
+ # [batch_size, num_instances, num_keypoints], respectively.
+ nearby_candidate_coords, nearby_candidate_scores = (
+ _gather_candidates_at_indices(keypoint_candidates, keypoint_scores,
+ nearby_candidate_inds))
+
+ if bboxes is None:
+ # Create bboxes from regressed keypoints.
+ # Shape [batch_size * num_instances, 4].
+ regressed_keypoints_flattened = tf.reshape(
+ regressed_keypoints, [-1, num_keypoints, 2])
+ bboxes_flattened = keypoint_ops.keypoints_to_enclosing_bounding_boxes(
+ regressed_keypoints_flattened)
+ else:
+ bboxes_flattened = tf.reshape(bboxes, [-1, 4])
+
+ # Scale the bounding boxes.
+ # Shape [batch_size, num_instances, 4].
+ boxlist = box_list.BoxList(bboxes_flattened)
+ boxlist_scaled = box_list_ops.scale_height_width(
+ boxlist, box_scale, box_scale)
+ bboxes_scaled = boxlist_scaled.get()
+ bboxes = tf.reshape(bboxes_scaled, [batch_size, num_instances, 4])
+
+ # Get ymin, xmin, ymax, xmax bounding box coordinates, tiled per keypoint.
+ # Shape [batch_size, num_instances, num_keypoints].
+ bboxes_tiled = tf.tile(tf.expand_dims(bboxes, 2), [1, 1, num_keypoints, 1])
+ ymin, xmin, ymax, xmax = tf.unstack(bboxes_tiled, axis=3)
+
+ # Produce a mask that indicates whether the original regressed keypoint
+ # should be used instead of a candidate keypoint.
+ # Shape [batch_size, num_instances, num_keypoints].
+ search_radius = (
+ tf.math.maximum(ymax - ymin, xmax - xmin) * candidate_search_scale)
+ mask = (tf.cast(nearby_candidate_coords[:, :, :, 0] < ymin, tf.int32) +
+ tf.cast(nearby_candidate_coords[:, :, :, 0] > ymax, tf.int32) +
+ tf.cast(nearby_candidate_coords[:, :, :, 1] < xmin, tf.int32) +
+ tf.cast(nearby_candidate_coords[:, :, :, 1] > xmax, tf.int32) +
+ # Filter out the chosen candidate with score lower than unmatched
+ # keypoint score.
+ tf.cast(nearby_candidate_scores <
+ unmatched_keypoint_score, tf.int32) +
+ tf.cast(min_distances > search_radius, tf.int32))
+ mask = mask > 0
+
+ # Create refined keypoints where candidate keypoints replace original
+ # regressed keypoints if they are in the vicinity of the regressed keypoints.
+ # Shape [batch_size, num_instances, num_keypoints, 2].
+ refined_keypoints = tf.where(
+ tf.tile(tf.expand_dims(mask, -1), [1, 1, 1, 2]),
+ regressed_keypoints,
+ nearby_candidate_coords)
+
+ # Update keypoints scores. In the case where we use the original regressed
+ # keypoints, we use a default score of `unmatched_keypoint_score`.
+ # Shape [batch_size, num_instances, num_keypoints].
+ refined_scores = tf.where(
+ mask,
+ unmatched_keypoint_score * tf.ones_like(nearby_candidate_scores),
+ nearby_candidate_scores)
+
+ return refined_keypoints, refined_scores
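+
+# Usage sketch (not part of the upstream file): reuses the illustrative
+# `candidates`, `scores` and `num_candidates` tensors from the sketch after
+# prediction_tensors_to_keypoint_candidates, plus an assumed output size of 64:
+#
+#   regressed = tf.random.uniform([1, 3, 17, 2]) * 64.0
+#   refined_keypoints, refined_scores = refine_keypoints(
+#       regressed, candidates, scores, num_candidates,
+#       unmatched_keypoint_score=UNMATCHED_KEYPOINT_SCORE,
+#       candidate_search_scale=KEYPOINT_CANDIDATE_SEARCH_SCALE,
+#       candidate_ranking_mode='min_distance')
+#   # refined_keypoints: [1, 3, 17, 2], refined_scores: [1, 3, 17]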
+
+
+def _pad_to_full_keypoint_dim(keypoint_coords, keypoint_scores, keypoint_inds,
+ num_total_keypoints):
+ """Scatter keypoint elements into tensors with full keypoints dimension.
+
+ Args:
+ keypoint_coords: a [batch_size, num_instances, num_keypoints, 2] float32
+ tensor.
+ keypoint_scores: a [batch_size, num_instances, num_keypoints] float32
+ tensor.
+ keypoint_inds: a list of integers that indicate the keypoint indices for
+ this specific keypoint class. These indices are used to scatter into
+ tensors that have a `num_total_keypoints` dimension.
+ num_total_keypoints: The total number of keypoints that this model predicts.
+
+ Returns:
+ A tuple with
+ keypoint_coords_padded: a
+ [batch_size, num_instances, num_total_keypoints,2] float32 tensor.
+ keypoint_scores_padded: a [batch_size, num_instances, num_total_keypoints]
+ float32 tensor.
+ """
+ batch_size, num_instances, _, _ = (
+ shape_utils.combined_static_and_dynamic_shape(keypoint_coords))
+ kpt_coords_transposed = tf.transpose(keypoint_coords, [2, 0, 1, 3])
+ kpt_scores_transposed = tf.transpose(keypoint_scores, [2, 0, 1])
+ kpt_inds_tensor = tf.expand_dims(keypoint_inds, axis=-1)
+ kpt_coords_scattered = tf.scatter_nd(
+ indices=kpt_inds_tensor,
+ updates=kpt_coords_transposed,
+ shape=[num_total_keypoints, batch_size, num_instances, 2])
+ kpt_scores_scattered = tf.scatter_nd(
+ indices=kpt_inds_tensor,
+ updates=kpt_scores_transposed,
+ shape=[num_total_keypoints, batch_size, num_instances])
+ keypoint_coords_padded = tf.transpose(kpt_coords_scattered, [1, 2, 0, 3])
+ keypoint_scores_padded = tf.transpose(kpt_scores_scattered, [1, 2, 0])
+ return keypoint_coords_padded, keypoint_scores_padded
+
+
+def _pad_to_full_instance_dim(keypoint_coords, keypoint_scores, instance_inds,
+ max_instances):
+ """Scatter keypoint elements into tensors with full instance dimension.
+
+ Args:
+ keypoint_coords: a [batch_size, num_instances, num_keypoints, 2] float32
+ tensor.
+ keypoint_scores: a [batch_size, num_instances, num_keypoints] float32
+ tensor.
+ instance_inds: a list of integers that indicate the instance indices for
+ these keypoints. These indices are used to scatter into tensors
+ that have a `max_instances` dimension.
+ max_instances: The maximum number of instances detected by the model.
+
+ Returns:
+ A tuple with
+ keypoint_coords_padded: a [batch_size, max_instances, num_keypoints, 2]
+ float32 tensor.
+ keypoint_scores_padded: a [batch_size, max_instances, num_keypoints]
+ float32 tensor.
+ """
+ batch_size, _, num_keypoints, _ = (
+ shape_utils.combined_static_and_dynamic_shape(keypoint_coords))
+ kpt_coords_transposed = tf.transpose(keypoint_coords, [1, 0, 2, 3])
+ kpt_scores_transposed = tf.transpose(keypoint_scores, [1, 0, 2])
+ instance_inds = tf.expand_dims(instance_inds, axis=-1)
+ kpt_coords_scattered = tf.scatter_nd(
+ indices=instance_inds,
+ updates=kpt_coords_transposed,
+ shape=[max_instances, batch_size, num_keypoints, 2])
+ kpt_scores_scattered = tf.scatter_nd(
+ indices=instance_inds,
+ updates=kpt_scores_transposed,
+ shape=[max_instances, batch_size, num_keypoints])
+ keypoint_coords_padded = tf.transpose(kpt_coords_scattered, [1, 0, 2, 3])
+ keypoint_scores_padded = tf.transpose(kpt_scores_scattered, [1, 0, 2])
+ return keypoint_coords_padded, keypoint_scores_padded
+
+
+def _gather_candidates_at_indices(keypoint_candidates, keypoint_scores,
+ indices):
+ """Gathers keypoint candidate coordinates and scores at indices.
+
+ Args:
+ keypoint_candidates: a float tensor of shape [batch_size, max_candidates,
+ num_keypoints, 2] with candidate coordinates.
+ keypoint_scores: a float tensor of shape [batch_size, max_candidates,
+ num_keypoints] with keypoint scores.
+ indices: an integer tensor of shape [batch_size, num_indices, num_keypoints]
+ with indices.
+
+ Returns:
+ A tuple with
+ gathered_keypoint_candidates: a float tensor of shape [batch_size,
+ num_indices, num_keypoints, 2] with gathered coordinates.
+ gathered_keypoint_scores: a float tensor of shape [batch_size,
+ num_indices, num_keypoints].
+ """
+ # Transpose tensors so that all batch dimensions are up front.
+ keypoint_candidates_transposed = tf.transpose(keypoint_candidates,
+ [0, 2, 1, 3])
+ keypoint_scores_transposed = tf.transpose(keypoint_scores, [0, 2, 1])
+ nearby_candidate_inds_transposed = tf.transpose(indices,
+ [0, 2, 1])
+ nearby_candidate_coords_transposed = tf.gather(
+ keypoint_candidates_transposed, nearby_candidate_inds_transposed,
+ batch_dims=2)
+ nearby_candidate_scores_transposed = tf.gather(
+ keypoint_scores_transposed, nearby_candidate_inds_transposed,
+ batch_dims=2)
+ gathered_keypoint_candidates = tf.transpose(nearby_candidate_coords_transposed,
+ [0, 2, 1, 3])
+ gathered_keypoint_scores = tf.transpose(nearby_candidate_scores_transposed,
+ [0, 2, 1])
+ return gathered_keypoint_candidates, gathered_keypoint_scores
+
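+# Illustrative sketch (example shapes only): with keypoint_candidates of shape
+# [2, 10, 17, 2], keypoint_scores of shape [2, 10, 17] and indices of shape
+# [2, 5, 17], the gather selects, for each of the 5 index rows, one candidate
+# per (batch, keypoint type) pair:
+#   cands = tf.random.uniform([2, 10, 17, 2])
+#   cand_scores = tf.random.uniform([2, 10, 17])
+#   inds = tf.zeros([2, 5, 17], dtype=tf.int32)
+#   coords, scores = _gather_candidates_at_indices(cands, cand_scores, inds)
+#   # coords.shape == [2, 5, 17, 2]; scores.shape == [2, 5, 17]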
+
+def flattened_indices_from_row_col_indices(row_indices, col_indices, num_cols):
+ """Get the index in a flattened array given row and column indices."""
+ return (row_indices * num_cols) + col_indices
+
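+# Illustrative example: in a row-major flattening of a grid with num_cols=4,
+# the element at (row=2, col=3) lands at flattened index 2 * 4 + 3 = 11:
+#   flattened_indices_from_row_col_indices(row_indices=2, col_indices=3,
+#                                          num_cols=4)  # -> 11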
+
+def row_col_channel_indices_from_flattened_indices(indices, num_cols,
+ num_channels):
+ """Computes row, column and channel indices from flattened indices.
+
+ Args:
+ indices: An integer tensor of any shape holding the indices in the flattened
+ space.
+ num_cols: Number of columns in the image (width).
+ num_channels: Number of channels in the image.
+
+ Returns:
+ row_indices: The row indices corresponding to each of the input indices.
+ Same shape as indices.
+ col_indices: The column indices corresponding to each of the input indices.
+ Same shape as indices.
+ channel_indices: The channel indices corresponding to each of the input
+ indices. Same shape as indices.
+ """
+ row_indices = (indices // num_channels) // num_cols
+ col_indices = (indices // num_channels) % num_cols
+ channel_indices = indices % num_channels
+
+ return row_indices, col_indices, channel_indices
+
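+# Illustrative example: for a feature map flattened in (row, col, channel)
+# order with num_cols=4 and num_channels=3, flattened index 23 decodes to
+# (row=1, col=3, channel=2), since 23 == (1 * 4 + 3) * 3 + 2:
+#   row_col_channel_indices_from_flattened_indices(23, num_cols=4,
+#                                                  num_channels=3)
+#   # -> (1, 3, 2)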
+
+def get_valid_anchor_weights_in_flattened_image(true_image_shapes, height,
+ width):
+ """Computes valid anchor weights for an image assuming pixels will be flattened.
+
+ This function is useful when we only want to penalize valid areas in the
+ image in the case when padding is used. The function assumes that the loss
+ function will be applied after flattening the spatial dimensions and returns
+ anchor weights accordingly.
+
+ Args:
+ true_image_shapes: An integer tensor of shape [batch_size, 3] representing
+ the true image shape (without padding) for each sample in the batch.
+ height: height of the prediction from the network.
+ width: width of the prediction from the network.
+
+ Returns:
+ valid_anchor_weights: a float tensor of shape [batch_size, height * width]
+ with 1s in locations where the spatial coordinates fall within the height
+ and width in true_image_shapes.
+ """
+
+ indices = tf.reshape(tf.range(height * width), [1, -1])
+ batch_size = tf.shape(true_image_shapes)[0]
+ batch_indices = tf.ones((batch_size, 1), dtype=tf.int32) * indices
+
+ y_coords, x_coords, _ = row_col_channel_indices_from_flattened_indices(
+ batch_indices, width, 1)
+
+ max_y, max_x = true_image_shapes[:, 0], true_image_shapes[:, 1]
+ max_x = _to_float32(tf.expand_dims(max_x, 1))
+ max_y = _to_float32(tf.expand_dims(max_y, 1))
+
+ x_coords = _to_float32(x_coords)
+ y_coords = _to_float32(y_coords)
+
+ valid_mask = tf.math.logical_and(x_coords < max_x, y_coords < max_y)
+
+ return _to_float32(valid_mask)
+
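+# Illustrative sketch (example values only): for a 4x4 prediction where the
+# only image in the batch has a true height of 2 and a true width of 3, the
+# returned weights mark the top-left 2x3 block with ones:
+#   true_shapes = tf.constant([[2, 3, 3]])  # [height, width, channels]
+#   weights = get_valid_anchor_weights_in_flattened_image(true_shapes, 4, 4)
+#   # tf.reshape(weights, [4, 4]) ==
+#   # [[1, 1, 1, 0],
+#   #  [1, 1, 1, 0],
+#   #  [0, 0, 0, 0],
+#   #  [0, 0, 0, 0]]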
+
+def convert_strided_predictions_to_normalized_boxes(boxes, stride,
+ true_image_shapes):
+ """Converts predictions in the output space to normalized boxes.
+
+ Boxes falling outside the valid image boundary are clipped to be on the
+ boundary.
+
+ Args:
+ boxes: A tensor of shape [batch_size, num_boxes, 4] holding the raw
+ coordinates of boxes in the model's output space.
+ stride: The stride in the output space.
+ true_image_shapes: A tensor of shape [batch_size, 3] representing the true
+ shape of the input not considering padding.
+
+ Returns:
+ boxes: A tensor of shape [batch_size, num_boxes, 4] representing the
+ coordinates of the normalized boxes.
+ """
+
+ def _normalize_boxlist(args):
+
+ boxes, height, width = args
+ boxes = box_list_ops.scale(boxes, stride, stride)
+ boxes = box_list_ops.to_normalized_coordinates(boxes, height, width)
+ boxes = box_list_ops.clip_to_window(boxes, [0., 0., 1., 1.],
+ filter_nonoverlapping=False)
+ return boxes
+
+ box_lists = [box_list.BoxList(boxes) for boxes in tf.unstack(boxes, axis=0)]
+ true_heights, true_widths, _ = tf.unstack(true_image_shapes, axis=1)
+
+ true_heights_list = tf.unstack(true_heights, axis=0)
+ true_widths_list = tf.unstack(true_widths, axis=0)
+
+ box_lists = list(map(_normalize_boxlist,
+ zip(box_lists, true_heights_list, true_widths_list)))
+ boxes = tf.stack([box_list_instance.get() for
+ box_list_instance in box_lists], axis=0)
+
+ return boxes
+
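+# Illustrative example of the conversion (assumed numbers only): with stride=4
+# and a true image of height 100 and width 200, a box corner predicted at
+# (y=10, x=20) in the output space first maps to input coordinates (40, 80)
+# and then to normalized coordinates (40 / 100, 80 / 200) = (0.4, 0.4) before
+# being clipped to the [0, 1] window.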
+
+def convert_strided_predictions_to_normalized_keypoints(
+ keypoint_coords, keypoint_scores, stride, true_image_shapes,
+ clip_out_of_frame_keypoints=False):
+ """Converts predictions in the output space to normalized keypoints.
+
+ If clip_out_of_frame_keypoints=False, keypoint coordinates falling outside
+ the valid image boundary are normalized but not clipped; If
+ clip_out_of_frame_keypoints=True, keypoint coordinates falling outside the
+ valid image boundary are clipped to the closest image boundary and the scores
+ will be set to 0.0.
+
+ Args:
+ keypoint_coords: A tensor of shape
+ [batch_size, num_instances, num_keypoints, 2] holding the raw coordinates
+ of keypoints in the model's output space.
+ keypoint_scores: A tensor of shape
+ [batch_size, num_instances, num_keypoints] holding the keypoint scores.
+ stride: The stride in the output space.
+ true_image_shapes: A tensor of shape [batch_size, 3] representing the true
+ shape of the input not considering padding.
+ clip_out_of_frame_keypoints: A boolean indicating whether keypoints outside
+ the image boundary should be clipped. If True, keypoint coords will be
+ clipped to image boundary. If False, keypoints are normalized but not
+ filtered based on their location.
+
+ Returns:
+ keypoint_coords_normalized: A tensor of shape
+ [batch_size, num_instances, num_keypoints, 2] representing the coordinates
+ of the normalized keypoints.
+ keypoint_scores: A tensor of shape
+ [batch_size, num_instances, num_keypoints] representing the updated
+ keypoint scores.
+ """
+ # Get the batch size from the keypoint coordinates shape.
+ batch_size, _, _, _ = (
+ shape_utils.combined_static_and_dynamic_shape(keypoint_coords))
+
+ # Scale and normalize keypoints.
+ true_heights, true_widths, _ = tf.unstack(true_image_shapes, axis=1)
+ yscale = float(stride) / tf.cast(true_heights, tf.float32)
+ xscale = float(stride) / tf.cast(true_widths, tf.float32)
+ yx_scale = tf.stack([yscale, xscale], axis=1)
+ keypoint_coords_normalized = keypoint_coords * tf.reshape(
+ yx_scale, [batch_size, 1, 1, 2])
+
+ if clip_out_of_frame_keypoints:
+ # Determine the keypoints that are in the true image regions.
+ valid_indices = tf.logical_and(
+ tf.logical_and(keypoint_coords_normalized[:, :, :, 0] >= 0.0,
+ keypoint_coords_normalized[:, :, :, 0] <= 1.0),
+ tf.logical_and(keypoint_coords_normalized[:, :, :, 1] >= 0.0,
+ keypoint_coords_normalized[:, :, :, 1] <= 1.0))
+ batch_window = tf.tile(
+ tf.constant([[0.0, 0.0, 1.0, 1.0]], dtype=tf.float32),
+ multiples=[batch_size, 1])
+ def clip_to_window(inputs):
+ keypoints, window = inputs
+ return keypoint_ops.clip_to_window(keypoints, window)
+ keypoint_coords_normalized = tf.map_fn(
+ clip_to_window, (keypoint_coords_normalized, batch_window),
+ dtype=tf.float32, back_prop=False)
+ keypoint_scores = tf.where(valid_indices, keypoint_scores,
+ tf.zeros_like(keypoint_scores))
+ return keypoint_coords_normalized, keypoint_scores
+
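+# Illustrative example (assumed numbers only): with stride=4 and a true image
+# of height 100 and width 200, a keypoint predicted at (y=30, x=60) in the
+# output space normalizes to (30 * 4 / 100, 60 * 4 / 200) = (1.2, 1.2). With
+# clip_out_of_frame_keypoints=True it is clipped to (1.0, 1.0) and its score
+# is set to 0.0 because it falls outside the valid [0, 1] range.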
+
+def convert_strided_predictions_to_instance_masks(
+ boxes, classes, masks, stride, mask_height, mask_width,
+ true_image_shapes, score_threshold=0.5):
+ """Converts predicted full-image masks into instance masks.
+
+ For each predicted detection box:
+ * Crop and resize the predicted mask based on the detected bounding box
+ coordinates and class prediction. Uses bilinear resampling.
+ * Binarize the mask using the provided score threshold.
+
+ Args:
+ boxes: A tensor of shape [batch, max_detections, 4] holding the predicted
+ boxes, in normalized coordinates (relative to the true image dimensions).
+ classes: An integer tensor of shape [batch, max_detections] containing the
+ detected class for each box (0-indexed).
+ masks: A [batch, output_height, output_width, num_classes] float32
+ tensor with class probabilities.
+ stride: The stride in the output space.
+ mask_height: The desired resized height for instance masks.
+ mask_width: The desired resized width for instance masks.
+ true_image_shapes: A tensor of shape [batch, 3] representing the true
+ shape of the inputs not considering padding.
+ score_threshold: The threshold at which to convert predicted mask
+ into foreground pixels.
+
+ Returns:
+ A [batch_size, max_detections, mask_height, mask_width] uint8 tensor with
+ predicted foreground mask for each instance. The masks take values in
+ {0, 1}.
+ """
+ _, output_height, output_width, _ = (
+ shape_utils.combined_static_and_dynamic_shape(masks))
+ input_height = stride * output_height
+ input_width = stride * output_width
+
+ # Boxes are in normalized coordinates relative to true image shapes. Convert
+ # coordinates to be normalized relative to input image shapes (since masks
+ # may still have padding).
+ # Then crop and resize each mask.
+ def crop_and_threshold_masks(args):
+ """Crops masks based on detection boxes."""
+ boxes, classes, masks, true_height, true_width = args
+ boxlist = box_list.BoxList(boxes)
+ y_scale = true_height / input_height
+ x_scale = true_width / input_width
+ boxlist = box_list_ops.scale(boxlist, y_scale, x_scale)
+ boxes = boxlist.get()
+ # Convert masks from [input_height, input_width, num_classes] to
+ # [num_classes, input_height, input_width, 1].
+ masks_4d = tf.transpose(masks, perm=[2, 0, 1])[:, :, :, tf.newaxis]
+ cropped_masks = tf2.image.crop_and_resize(
+ masks_4d,
+ boxes=boxes,
+ box_indices=classes,
+ crop_size=[mask_height, mask_width],
+ method='bilinear')
+ masks_3d = tf.squeeze(cropped_masks, axis=3)
+ masks_binarized = tf.math.greater_equal(masks_3d, score_threshold)
+ return tf.cast(masks_binarized, tf.uint8)
+
+ true_heights, true_widths, _ = tf.unstack(true_image_shapes, axis=1)
+ masks_for_image = shape_utils.static_or_dynamic_map_fn(
+ crop_and_threshold_masks,
+ elems=[boxes, classes, masks, true_heights, true_widths],
+ dtype=tf.uint8,
+ back_prop=False)
+ masks = tf.stack(masks_for_image, axis=0)
+ return masks
+
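+# Illustrative example of the box rescaling step above (assumed numbers only):
+# with stride=4 and a 128x128 mask prediction, the padded input is 512x512; if
+# the true image is 256x512, boxes that are normalized w.r.t. the true image
+# get multiplied by (256 / 512, 512 / 512) = (0.5, 1.0) so that
+# crop_and_resize, which expects coordinates normalized w.r.t. the full
+# (padded) prediction, samples the correct region.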
+
+class ObjectDetectionParams(
+ collections.namedtuple('ObjectDetectionParams', [
+ 'localization_loss', 'scale_loss_weight', 'offset_loss_weight',
+ 'task_loss_weight'
+ ])):
+ """Namedtuple to host object detection related parameters.
+
+ This is a wrapper class over the fields that are either the hyper-parameters
+ or the loss functions needed for the object detection task. The class is
+ immutable after construction. Please see the __new__ function for detailed
+ information on each field.
+ """
+
+ __slots__ = ()
+
+ def __new__(cls,
+ localization_loss,
+ scale_loss_weight,
+ offset_loss_weight,
+ task_loss_weight=1.0):
+ """Constructor with default values for ObjectDetectionParams.
+
+ Args:
+ localization_loss: an object_detection.core.losses.Loss object to compute
+ the loss for the center offset and height/width predictions in
+ CenterNet.
+ scale_loss_weight: float, The weight for localizing box size. Note that
+ the scale loss is dependent on the input image size, since we penalize
+ the raw height and width. This constant may need to be adjusted
+ depending on the input size.
+ offset_loss_weight: float, The weight for localizing center offsets.
+ task_loss_weight: float, the weight of the object detection loss.
+
+ Returns:
+ An initialized ObjectDetectionParams namedtuple.
+ """
+ return super(ObjectDetectionParams,
+ cls).__new__(cls, localization_loss, scale_loss_weight,
+ offset_loss_weight, task_loss_weight)
+
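+# Minimal usage sketch (assumes an L1-style loss from
+# object_detection.core.losses; it is not constructed anywhere in this file):
+#   od_params = ObjectDetectionParams(
+#       localization_loss=losses.L1LocalizationLoss(),
+#       scale_loss_weight=0.1,
+#       offset_loss_weight=1.0)  # task_loss_weight defaults to 1.0.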
+
+class KeypointEstimationParams(
+ collections.namedtuple('KeypointEstimationParams', [
+ 'task_name', 'class_id', 'keypoint_indices', 'classification_loss',
+ 'localization_loss', 'keypoint_labels', 'keypoint_std_dev',
+ 'keypoint_heatmap_loss_weight', 'keypoint_offset_loss_weight',
+ 'keypoint_regression_loss_weight', 'keypoint_candidate_score_threshold',
+ 'heatmap_bias_init', 'num_candidates_per_keypoint', 'task_loss_weight',
+ 'peak_max_pool_kernel_size', 'unmatched_keypoint_score', 'box_scale',
+ 'candidate_search_scale', 'candidate_ranking_mode',
+ 'offset_peak_radius', 'per_keypoint_offset'
+ ])):
+ """Namedtuple to host object detection related parameters.
+
+ This is a wrapper class over the fields that are either the hyper-parameters
+ or the loss functions needed for the keypoint estimation task. The class is
+ immutable after constructed. Please see the __new__ function for detailed
+ information for each fields.
+ """
+
+ __slots__ = ()
+
+ def __new__(cls,
+ task_name,
+ class_id,
+ keypoint_indices,
+ classification_loss,
+ localization_loss,
+ keypoint_labels=None,
+ keypoint_std_dev=None,
+ keypoint_heatmap_loss_weight=1.0,
+ keypoint_offset_loss_weight=1.0,
+ keypoint_regression_loss_weight=1.0,
+ keypoint_candidate_score_threshold=0.1,
+ heatmap_bias_init=-2.19,
+ num_candidates_per_keypoint=100,
+ task_loss_weight=1.0,
+ peak_max_pool_kernel_size=3,
+ unmatched_keypoint_score=0.1,
+ box_scale=1.2,
+ candidate_search_scale=0.3,
+ candidate_ranking_mode='min_distance',
+ offset_peak_radius=0,
+ per_keypoint_offset=False):
+ """Constructor with default values for KeypointEstimationParams.
+
+ Args:
+ task_name: string, the name of the task this namedtuple corresponds to.
+ Note that it should be a unique identifier of the task.
+ class_id: int, the ID of the class that contains the target keypoints to be
+ considered in this task. For example, if the task is human pose
+ estimation, the class id should correspond to the "human" class. Note
+ that the ID is 0-based, meaning that class 0 corresponds to the first
+ non-background object class.
+ keypoint_indices: A list of integers representing the indices of the
+ keypoints to be considered in this task. This is used to retrieve the
+ subset of the keypoints from gt_keypoints that should be considered in
+ this task.
+ classification_loss: an object_detection.core.losses.Loss object to
+ compute the loss for the class predictions in CenterNet.
+ localization_loss: an object_detection.core.losses.Loss object to compute
+ the loss for the center offset and height/width predictions in
+ CenterNet.
+ keypoint_labels: A list of strings representing the label text of each
+ keypoint, e.g. "nose", 'left_shoulder". Note that the length of this
+ list should be equal to keypoint_indices.
+ keypoint_std_dev: A list of float represent the standard deviation of the
+ Gaussian kernel used to generate the keypoint heatmap. It is to provide
+ the flexibility of using different sizes of Gaussian kernel for each
+ keypoint class.
+ keypoint_heatmap_loss_weight: float, The weight for the keypoint heatmap.
+ keypoint_offset_loss_weight: float, The weight for the keypoint offsets
+ loss.
+ keypoint_regression_loss_weight: float, The weight for keypoint regression
+ loss. Note that the loss is dependent on the input image size, since we
+ penalize the raw height and width. This constant may need to be adjusted
+ depending on the input size.
+ keypoint_candidate_score_threshold: float, The heatmap score threshold for
+ a keypoint to become a valid candidate.
+ heatmap_bias_init: float, the initial value of bias in the convolutional
+ kernel of the class prediction head. If set to None, the bias is
+ initialized with zeros.
+ num_candidates_per_keypoint: The maximum number of candidates to retrieve
+ for each keypoint.
+ task_loss_weight: float, the weight of the keypoint estimation loss.
+ peak_max_pool_kernel_size: Max pool kernel size to use to pull off peak
+ score locations in a neighborhood (independently for each keypoint
+ types).
+ unmatched_keypoint_score: The default score to use for regressed keypoints
+ that are not successfully snapped to a nearby candidate.
+ box_scale: The multiplier to expand the bounding boxes (either the
+ provided boxes or those which tightly cover the regressed keypoints).
+ candidate_search_scale: The scale parameter that multiplies the largest
+ dimension of a bounding box. The resulting distance becomes a search
+ radius for candidates in the vicinity of each regressed keypoint.
+ candidate_ranking_mode: One of ['min_distance', 'score_distance_ratio']
+ indicating how to select the keypoint candidate.
+ offset_peak_radius: The radius (in units of output pixels) around the
+ groundtruth heatmap peak to assign the offset targets. If set to 0, then
+ the offset target will only be assigned to the heatmap peak (same
+ behavior as the original paper).
+ per_keypoint_offset: A bool indicating whether to assign offsets for each
+ keypoint channel separately. If set to False, the output offset target has
+ the shape [batch_size, out_height, out_width, 2] (same behavior as the
+ original paper). If set to True, the output offset target has the shape
+ [batch_size, out_height, out_width, 2 * num_keypoints] (recommended when
+ the offset_peak_radius is not zero).
+
+ Returns:
+ An initialized KeypointEstimationParams namedtuple.
+ """
+ return super(KeypointEstimationParams, cls).__new__(
+ cls, task_name, class_id, keypoint_indices, classification_loss,
+ localization_loss, keypoint_labels, keypoint_std_dev,
+ keypoint_heatmap_loss_weight, keypoint_offset_loss_weight,
+ keypoint_regression_loss_weight, keypoint_candidate_score_threshold,
+ heatmap_bias_init, num_candidates_per_keypoint, task_loss_weight,
+ peak_max_pool_kernel_size, unmatched_keypoint_score, box_scale,
+ candidate_search_scale, candidate_ranking_mode, offset_peak_radius,
+ per_keypoint_offset)
+
+
+class ObjectCenterParams(
+ collections.namedtuple('ObjectCenterParams', [
+ 'classification_loss', 'object_center_loss_weight', 'heatmap_bias_init',
+ 'min_box_overlap_iou', 'max_box_predictions', 'use_only_known_classes'
+ ])):
+ """Namedtuple to store object center prediction related parameters."""
+
+ __slots__ = ()
+
+ def __new__(cls,
+ classification_loss,
+ object_center_loss_weight,
+ heatmap_bias_init=-2.19,
+ min_box_overlap_iou=0.7,
+ max_box_predictions=100,
+ use_labeled_classes=False):
+ """Constructor with default values for ObjectCenterParams.
+
+ Args:
+ classification_loss: an object_detection.core.losses.Loss object to
+ compute the loss for the class predictions in CenterNet.
+ object_center_loss_weight: float, The weight for the object center loss.
+ heatmap_bias_init: float, the initial value of bias in the convolutional
+ kernel of the object center prediction head. If set to None, the bias is
+ initialized with zeros.
+ min_box_overlap_iou: float, the minimum IOU overlap that predicted boxes
+ need to have with groundtruth boxes to not be penalized. This is used for
+ computing the class specific center heatmaps.
+ max_box_predictions: int, the maximum number of boxes to predict.
+ use_labeled_classes: boolean, whether to compute the loss only for labeled
+ classes (stored in the use_only_known_classes field).
+
+ Returns:
+ An initialized ObjectCenterParams namedtuple.
+ """
+ return super(ObjectCenterParams,
+ cls).__new__(cls, classification_loss,
+ object_center_loss_weight, heatmap_bias_init,
+ min_box_overlap_iou, max_box_predictions,
+ use_labeled_classes)
+
+
+class MaskParams(
+ collections.namedtuple('MaskParams', [
+ 'classification_loss', 'task_loss_weight', 'mask_height', 'mask_width',
+ 'score_threshold', 'heatmap_bias_init'
+ ])):
+ """Namedtuple to store mask prediction related parameters."""
+
+ __slots__ = ()
+
+ def __new__(cls,
+ classification_loss,
+ task_loss_weight=1.0,
+ mask_height=256,
+ mask_width=256,
+ score_threshold=0.5,
+ heatmap_bias_init=-2.19):
+ """Constructor with default values for MaskParams.
+
+ Args:
+ classification_loss: an object_detection.core.losses.Loss object to
+ compute the loss for the semantic segmentation predictions in CenterNet.
+ task_loss_weight: float, The loss weight for the segmentation task.
+ mask_height: The height of the resized instance segmentation mask.
+ mask_width: The width of the resized instance segmentation mask.
+ score_threshold: The threshold at which to convert predicted mask
+ probabilities (after passing through sigmoid) into foreground pixels.
+ heatmap_bias_init: float, the initial value of bias in the convolutional
+ kernel of the semantic segmentation prediction head. If set to None, the
+ bias is initialized with zeros.
+
+ Returns:
+ An initialized MaskParams namedtuple.
+ """
+ return super(MaskParams,
+ cls).__new__(cls, classification_loss,
+ task_loss_weight, mask_height, mask_width,
+ score_threshold, heatmap_bias_init)
+
+
+# The following constants are used to generate the keys of the
+# (prediction, loss, target assigner,...) dictionaries used in CenterNetMetaArch
+# class.
+DETECTION_TASK = 'detection_task'
+OBJECT_CENTER = 'object_center'
+BOX_SCALE = 'box/scale'
+BOX_OFFSET = 'box/offset'
+KEYPOINT_REGRESSION = 'keypoint/regression'
+KEYPOINT_HEATMAP = 'keypoint/heatmap'
+KEYPOINT_OFFSET = 'keypoint/offset'
+SEGMENTATION_TASK = 'segmentation_task'
+SEGMENTATION_HEATMAP = 'segmentation/heatmap'
+LOSS_KEY_PREFIX = 'Loss'
+
+
+def get_keypoint_name(task_name, head_name):
+ return '%s/%s' % (task_name, head_name)
+
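+# Illustrative example: for a keypoint task named 'human_pose',
+# get_keypoint_name('human_pose', KEYPOINT_HEATMAP) returns
+# 'human_pose/keypoint/heatmap', the key used for that task's heatmap head in
+# the prediction and loss dictionaries.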
+
+def get_num_instances_from_weights(groundtruth_weights_list):
+ """Computes the number of instances/boxes from the weights in a batch.
+
+ Args:
+ groundtruth_weights_list: A list of float tensors with shape
+ [max_num_instances] representing whether there is an actual instance in
+ the image (with non-zero value) or is padded to match the
+ max_num_instances (with value 0.0). The list represents the batch
+ dimension.
+
+ Returns:
+ A scalar integer tensor indicating how many instances/boxes are in the
+ images in the batch. Note that this function is usually used to normalize
+ the loss so the minimum return value is 1 to avoid weird behavior.
+ """
+ num_instances = tf.reduce_sum(
+ [tf.math.count_nonzero(w) for w in groundtruth_weights_list])
+ num_instances = tf.maximum(num_instances, 1)
+ return num_instances
+
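+# Illustrative example: for a batch of two images with padded weight vectors
+# [1.0, 1.0, 0.0] and [0.0, 0.0, 0.0], get_num_instances_from_weights returns
+# 2; when no instance is present at all it returns 1, so downstream loss
+# normalization never divides by zero.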
+
+class CenterNetMetaArch(model.DetectionModel):
+ """The CenterNet meta architecture [1].
+
+ [1]: https://arxiv.org/abs/1904.07850
+ """
+
+ def __init__(self,
+ is_training,
+ add_summaries,
+ num_classes,
+ feature_extractor,
+ image_resizer_fn,
+ object_center_params,
+ object_detection_params=None,
+ keypoint_params_dict=None,
+ mask_params=None):
+ """Initializes a CenterNet model.
+
+ Args:
+ is_training: Set to True if this model is being built for training.
+ add_summaries: Whether to add tf summaries in the model.
+ num_classes: int, The number of classes that the model should predict.
+ feature_extractor: A CenterNetFeatureExtractor to use to extract features
+ from an image.
+ image_resizer_fn: a callable for image resizing. This callable always
+ takes a rank-3 image tensor (corresponding to a single image) and
+ returns a rank-3 image tensor, possibly with new spatial dimensions and
+ a 1-D tensor of shape [3] indicating shape of true image within the
+ resized image tensor as the resized image tensor could be padded. See
+ builders/image_resizer_builder.py.
+ object_center_params: An ObjectCenterParams namedtuple. This object holds
+ the hyper-parameters for object center prediction. This is required by
+ either object detection or keypoint estimation tasks.
+ object_detection_params: An ObjectDetectionParams namedtuple. This object
+ holds the hyper-parameters necessary for object detection. Please see
+ the class definition for more details.
+ keypoint_params_dict: A dictionary that maps from task name to the
+ corresponding KeypointEstimationParams namedtuple. This object holds the
+ hyper-parameters necessary for multiple keypoint estimations. Please
+ see the class definition for more details.
+ mask_params: A MaskParams namedtuple. This object
+ holds the hyper-parameters for segmentation. Please see the class
+ definition for more details.
+ """
+ assert object_detection_params or keypoint_params_dict
+ # Shorten the name for convenience and better formatting.
+ self._is_training = is_training
+ # The Objects as Points paper attaches loss functions to multiple
+ # (`num_feature_outputs`) feature maps in the backbone. E.g.
+ # for the hourglass backbone, `num_feature_outputs` is 2.
+ self._feature_extractor = feature_extractor
+ self._num_feature_outputs = feature_extractor.num_feature_outputs
+ self._stride = self._feature_extractor.out_stride
+ self._image_resizer_fn = image_resizer_fn
+ self._center_params = object_center_params
+ self._od_params = object_detection_params
+ self._kp_params_dict = keypoint_params_dict
+ self._mask_params = mask_params
+
+ # Construct the prediction head nets.
+ self._prediction_head_dict = self._construct_prediction_heads(
+ num_classes,
+ self._num_feature_outputs,
+ class_prediction_bias_init=self._center_params.heatmap_bias_init)
+ # Initialize the target assigners.
+ self._target_assigner_dict = self._initialize_target_assigners(
+ stride=self._stride,
+ min_box_overlap_iou=self._center_params.min_box_overlap_iou)
+
+ # Will be used in VOD single_frame_meta_arch for tensor reshape.
+ self._batched_prediction_tensor_names = []
+
+ super(CenterNetMetaArch, self).__init__(num_classes)
+
+ @property
+ def batched_prediction_tensor_names(self):
+ if not self._batched_prediction_tensor_names:
+ raise RuntimeError('Must call predict() method to get batched prediction '
+ 'tensor names.')
+ return self._batched_prediction_tensor_names
+
+ def _construct_prediction_heads(self, num_classes, num_feature_outputs,
+ class_prediction_bias_init):
+ """Constructs the prediction heads based on the specific parameters.
+
+ Args:
+ num_classes: An integer indicating how many classes in total to predict.
+ num_feature_outputs: An integer indicating how many feature outputs to use
+ for calculating the loss. The Objects as Points paper attaches loss
+ functions to multiple (`num_feature_outputs`) feature maps in the
+ backbone. E.g. for the hourglass backbone, `num_feature_outputs` is 2.
+ class_prediction_bias_init: float, the initial value of bias in the
+ convolutional kernel of the class prediction head. If set to None, the
+ bias is initialized with zeros.
+
+ Returns:
+ A dictionary of keras modules generated by calling make_prediction_net
+ function.
+ """
+ prediction_heads = {}
+ prediction_heads[OBJECT_CENTER] = [
+ make_prediction_net(num_classes, bias_fill=class_prediction_bias_init)
+ for _ in range(num_feature_outputs)
+ ]
+ if self._od_params is not None:
+ prediction_heads[BOX_SCALE] = [
+ make_prediction_net(NUM_SIZE_CHANNELS)
+ for _ in range(num_feature_outputs)
+ ]
+ prediction_heads[BOX_OFFSET] = [
+ make_prediction_net(NUM_OFFSET_CHANNELS)
+ for _ in range(num_feature_outputs)
+ ]
+ if self._kp_params_dict is not None:
+ for task_name, kp_params in self._kp_params_dict.items():
+ num_keypoints = len(kp_params.keypoint_indices)
+ prediction_heads[get_keypoint_name(task_name, KEYPOINT_HEATMAP)] = [
+ make_prediction_net(
+ num_keypoints, bias_fill=kp_params.heatmap_bias_init)
+ for _ in range(num_feature_outputs)
+ ]
+ prediction_heads[get_keypoint_name(task_name, KEYPOINT_REGRESSION)] = [
+ make_prediction_net(NUM_OFFSET_CHANNELS * num_keypoints)
+ for _ in range(num_feature_outputs)
+ ]
+ if kp_params.per_keypoint_offset:
+ prediction_heads[get_keypoint_name(task_name, KEYPOINT_OFFSET)] = [
+ make_prediction_net(NUM_OFFSET_CHANNELS * num_keypoints)
+ for _ in range(num_feature_outputs)
+ ]
+ else:
+ prediction_heads[get_keypoint_name(task_name, KEYPOINT_OFFSET)] = [
+ make_prediction_net(NUM_OFFSET_CHANNELS)
+ for _ in range(num_feature_outputs)
+ ]
+ if self._mask_params is not None:
+ prediction_heads[SEGMENTATION_HEATMAP] = [
+ make_prediction_net(num_classes,
+ bias_fill=class_prediction_bias_init)
+ for _ in range(num_feature_outputs)]
+ return prediction_heads
+
+ def _initialize_target_assigners(self, stride, min_box_overlap_iou):
+ """Initializes the target assigners and puts them in a dictionary.
+
+ Args:
+ stride: An integer indicating the output stride of the network, i.e. the
+ ratio of the input image size to the feature map size.
+ min_box_overlap_iou: float, the minimum IOU overlap that predicted boxes
+ need to have with groundtruth boxes to not be penalized. This is used for
+ computing the class specific center heatmaps.
+
+ Returns:
+ A dictionary of initialized target assigners for each task.
+ """
+ target_assigners = {}
+ target_assigners[OBJECT_CENTER] = (
+ cn_assigner.CenterNetCenterHeatmapTargetAssigner(
+ stride, min_box_overlap_iou))
+ if self._od_params is not None:
+ target_assigners[DETECTION_TASK] = (
+ cn_assigner.CenterNetBoxTargetAssigner(stride))
+ if self._kp_params_dict is not None:
+ for task_name, kp_params in self._kp_params_dict.items():
+ target_assigners[task_name] = (
+ cn_assigner.CenterNetKeypointTargetAssigner(
+ stride=stride,
+ class_id=kp_params.class_id,
+ keypoint_indices=kp_params.keypoint_indices,
+ keypoint_std_dev=kp_params.keypoint_std_dev,
+ peak_radius=kp_params.offset_peak_radius,
+ per_keypoint_offset=kp_params.per_keypoint_offset))
+ if self._mask_params is not None:
+ target_assigners[SEGMENTATION_TASK] = (
+ cn_assigner.CenterNetMaskTargetAssigner(stride))
+
+ return target_assigners
+
+ def _compute_object_center_loss(self, input_height, input_width,
+ object_center_predictions, per_pixel_weights):
+ """Computes the object center loss.
+
+ Args:
+ input_height: An integer scalar tensor representing input image height.
+ input_width: An integer scalar tensor representing input image width.
+ object_center_predictions: A list of float tensors of shape [batch_size,
+ out_height, out_width, num_classes] representing the object center
+ feature maps.
+ per_pixel_weights: A float tensor of shape [batch_size,
+ out_height * out_width, 1] with 1s in locations where the spatial
+ coordinates fall within the height and width in true_image_shapes.
+
+ Returns:
+ A float scalar tensor representing the object center loss per instance.
+ """
+ gt_boxes_list = self.groundtruth_lists(fields.BoxListFields.boxes)
+ gt_classes_list = self.groundtruth_lists(fields.BoxListFields.classes)
+ gt_weights_list = self.groundtruth_lists(fields.BoxListFields.weights)
+
+ if self._center_params.use_only_known_classes:
+ gt_labeled_classes_list = self.groundtruth_lists(
+ fields.InputDataFields.groundtruth_labeled_classes)
+ batch_labeled_classes = tf.stack(gt_labeled_classes_list, axis=0)
+ batch_labeled_classes_shape = tf.shape(batch_labeled_classes)
+ batch_labeled_classes = tf.reshape(
+ batch_labeled_classes,
+ [batch_labeled_classes_shape[0], 1, batch_labeled_classes_shape[-1]])
+ per_pixel_weights = per_pixel_weights * batch_labeled_classes
+
+ # Convert the groundtruth to targets.
+ assigner = self._target_assigner_dict[OBJECT_CENTER]
+ heatmap_targets = assigner.assign_center_targets_from_boxes(
+ height=input_height,
+ width=input_width,
+ gt_boxes_list=gt_boxes_list,
+ gt_classes_list=gt_classes_list,
+ gt_weights_list=gt_weights_list)
+
+ flattened_heatmap_targets = _flatten_spatial_dimensions(heatmap_targets)
+ num_boxes = _to_float32(get_num_instances_from_weights(gt_weights_list))
+
+ loss = 0.0
+ object_center_loss = self._center_params.classification_loss
+ # Loop through each feature output head.
+ for pred in object_center_predictions:
+ pred = _flatten_spatial_dimensions(pred)
+ loss += object_center_loss(
+ pred, flattened_heatmap_targets, weights=per_pixel_weights)
+ loss_per_instance = tf.reduce_sum(loss) / (
+ float(len(object_center_predictions)) * num_boxes)
+ return loss_per_instance
+
+ def _compute_object_detection_losses(self, input_height, input_width,
+ prediction_dict, per_pixel_weights):
+ """Computes the weighted object detection losses.
+
+ This wrapper function calls the function which computes the losses for
+ object detection task and applies corresponding weights to the losses.
+
+ Args:
+ input_height: An integer scalar tensor representing input image height.
+ input_width: An integer scalar tensor representing input image width.
+ prediction_dict: A dictionary holding predicted tensors output by
+ "predict" function. See "predict" function for more detailed
+ description.
+ per_pixel_weights: A float tensor of shape [batch_size,
+ out_height * out_width, 1] with 1s in locations where the spatial
+ coordinates fall within the height and width in true_image_shapes.
+
+ Returns:
+ A dictionary of scalar float tensors representing the weighted losses for
+ object detection task:
+ BOX_SCALE: the weighted scale (height/width) loss.
+ BOX_OFFSET: the weighted object offset loss.
+ """
+ od_scale_loss, od_offset_loss = self._compute_box_scale_and_offset_loss(
+ scale_predictions=prediction_dict[BOX_SCALE],
+ offset_predictions=prediction_dict[BOX_OFFSET],
+ input_height=input_height,
+ input_width=input_width)
+ loss_dict = {}
+ loss_dict[BOX_SCALE] = (
+ self._od_params.scale_loss_weight * od_scale_loss)
+ loss_dict[BOX_OFFSET] = (
+ self._od_params.offset_loss_weight * od_offset_loss)
+ return loss_dict
+
+ def _compute_box_scale_and_offset_loss(self, input_height, input_width,
+ scale_predictions, offset_predictions):
+ """Computes the scale loss of the object detection task.
+
+ Args:
+ input_height: An integer scalar tensor representing input image height.
+ input_width: An integer scalar tensor representing input image width.
+ scale_predictions: A list of float tensors of shape [batch_size,
+ out_height, out_width, 2] representing the prediction heads of the model
+ for object scale (i.e. height and width).
+ offset_predictions: A list of float tensors of shape [batch_size,
+ out_height, out_width, 2] representing the prediction heads of the model
+ for object offset.
+
+ Returns:
+ A tuple of two losses:
+ scale_loss: A float scalar tensor representing the object height/width
+ loss normalized by total number of boxes.
+ offset_loss: A float scalar tensor representing the object offset loss
+ normalized by total number of boxes.
+ """
+ # TODO(vighneshb) Explore a size invariant version of scale loss.
+ gt_boxes_list = self.groundtruth_lists(fields.BoxListFields.boxes)
+ gt_weights_list = self.groundtruth_lists(fields.BoxListFields.weights)
+ num_boxes = _to_float32(get_num_instances_from_weights(gt_weights_list))
+ num_predictions = float(len(scale_predictions))
+
+ assigner = self._target_assigner_dict[DETECTION_TASK]
+ (batch_indices, batch_height_width_targets, batch_offset_targets,
+ batch_weights) = assigner.assign_size_and_offset_targets(
+ height=input_height,
+ width=input_width,
+ gt_boxes_list=gt_boxes_list,
+ gt_weights_list=gt_weights_list)
+ batch_weights = tf.expand_dims(batch_weights, -1)
+
+ scale_loss = 0
+ offset_loss = 0
+ localization_loss_fn = self._od_params.localization_loss
+ for scale_pred, offset_pred in zip(scale_predictions, offset_predictions):
+ # Compute the scale loss.
+ scale_pred = cn_assigner.get_batch_predictions_from_indices(
+ scale_pred, batch_indices)
+ scale_loss += localization_loss_fn(
+ scale_pred, batch_height_width_targets, weights=batch_weights)
+ # Compute the offset loss.
+ offset_pred = cn_assigner.get_batch_predictions_from_indices(
+ offset_pred, batch_indices)
+ offset_loss += localization_loss_fn(
+ offset_pred, batch_offset_targets, weights=batch_weights)
+ scale_loss = tf.reduce_sum(scale_loss) / (
+ num_predictions * num_boxes)
+ offset_loss = tf.reduce_sum(offset_loss) / (
+ num_predictions * num_boxes)
+ return scale_loss, offset_loss
+
+ def _compute_keypoint_estimation_losses(self, task_name, input_height,
+ input_width, prediction_dict,
+ per_pixel_weights):
+ """Computes the weighted keypoint losses."""
+ kp_params = self._kp_params_dict[task_name]
+ heatmap_key = get_keypoint_name(task_name, KEYPOINT_HEATMAP)
+ offset_key = get_keypoint_name(task_name, KEYPOINT_OFFSET)
+ regression_key = get_keypoint_name(task_name, KEYPOINT_REGRESSION)
+ heatmap_loss = self._compute_kp_heatmap_loss(
+ input_height=input_height,
+ input_width=input_width,
+ task_name=task_name,
+ heatmap_predictions=prediction_dict[heatmap_key],
+ classification_loss_fn=kp_params.classification_loss,
+ per_pixel_weights=per_pixel_weights)
+ offset_loss = self._compute_kp_offset_loss(
+ input_height=input_height,
+ input_width=input_width,
+ task_name=task_name,
+ offset_predictions=prediction_dict[offset_key],
+ localization_loss_fn=kp_params.localization_loss)
+ reg_loss = self._compute_kp_regression_loss(
+ input_height=input_height,
+ input_width=input_width,
+ task_name=task_name,
+ regression_predictions=prediction_dict[regression_key],
+ localization_loss_fn=kp_params.localization_loss)
+
+ loss_dict = {}
+ loss_dict[heatmap_key] = (
+ kp_params.keypoint_heatmap_loss_weight * heatmap_loss)
+ loss_dict[offset_key] = (
+ kp_params.keypoint_offset_loss_weight * offset_loss)
+ loss_dict[regression_key] = (
+ kp_params.keypoint_regression_loss_weight * reg_loss)
+ return loss_dict
+
+ def _compute_kp_heatmap_loss(self, input_height, input_width, task_name,
+ heatmap_predictions, classification_loss_fn,
+ per_pixel_weights):
+ """Computes the heatmap loss of the keypoint estimation task.
+
+ Args:
+ input_height: An integer scalar tensor representing input image height.
+ input_width: An integer scalar tensor representing input image width.
+ task_name: A string representing the name of the keypoint task.
+ heatmap_predictions: A list of float tensors of shape [batch_size,
+ out_height, out_width, num_keypoints] representing the prediction heads
+ of the model for keypoint heatmap.
+ classification_loss_fn: An object_detection.core.losses.Loss object to
+ compute the loss for the class predictions in CenterNet.
+ per_pixel_weights: A float tensor of shape [batch_size,
+ out_height * out_width, 1] with 1s in locations where the spatial
+ coordinates fall within the height and width in true_image_shapes.
+
+ Returns:
+ loss: A float scalar tensor representing the object keypoint heatmap loss
+ normalized by number of instances.
+ """
+ gt_keypoints_list = self.groundtruth_lists(fields.BoxListFields.keypoints)
+ gt_classes_list = self.groundtruth_lists(fields.BoxListFields.classes)
+ gt_weights_list = self.groundtruth_lists(fields.BoxListFields.weights)
+ gt_boxes_list = self.groundtruth_lists(fields.BoxListFields.boxes)
+
+ assigner = self._target_assigner_dict[task_name]
+ (keypoint_heatmap, num_instances_per_kp_type,
+ valid_mask_batch) = assigner.assign_keypoint_heatmap_targets(
+ height=input_height,
+ width=input_width,
+ gt_keypoints_list=gt_keypoints_list,
+ gt_weights_list=gt_weights_list,
+ gt_classes_list=gt_classes_list,
+ gt_boxes_list=gt_boxes_list)
+ flattened_valid_mask = _flatten_spatial_dimensions(
+ tf.expand_dims(valid_mask_batch, axis=-1))
+ flattened_heatmap_targets = _flatten_spatial_dimensions(keypoint_heatmap)
+ # Sum over the number of instances per keypoint type to get the total
+ # number of keypoints. Note that this is used to normalize the loss and we
+ # keep the minimum value at 1 to avoid producing degenerate loss values when
+ # no keypoint is present in the image batch.
+ num_instances = tf.maximum(
+ tf.cast(tf.reduce_sum(num_instances_per_kp_type), dtype=tf.float32),
+ 1.0)
+ loss = 0.0
+ # Loop through each feature output head.
+ for pred in heatmap_predictions:
+ pred = _flatten_spatial_dimensions(pred)
+ unweighted_loss = classification_loss_fn(
+ pred,
+ flattened_heatmap_targets,
+ weights=tf.ones_like(per_pixel_weights))
+ # Apply the weights after the loss function to have full control over it.
+ loss += unweighted_loss * per_pixel_weights * flattened_valid_mask
+ loss = tf.reduce_sum(loss) / (
+ float(len(heatmap_predictions)) * num_instances)
+ return loss
+
+ def _compute_kp_offset_loss(self, input_height, input_width, task_name,
+ offset_predictions, localization_loss_fn):
+ """Computes the offset loss of the keypoint estimation task.
+
+ Args:
+ input_height: An integer scalar tensor representing input image height.
+ input_width: An integer scalar tensor representing input image width.
+ task_name: A string representing the name of the keypoint task.
+ offset_predictions: A list of float tensors of shape [batch_size,
+ out_height, out_width, 2] representing the prediction heads of the model
+ for keypoint offset.
+ localization_loss_fn: An object_detection.core.losses.Loss object to
+ compute the loss for the keypoint offset predictions in CenterNet.
+
+ Returns:
+ loss: A float scalar tensor representing the keypoint offset loss
+ normalized by number of total keypoints.
+ """
+ gt_keypoints_list = self.groundtruth_lists(fields.BoxListFields.keypoints)
+ gt_classes_list = self.groundtruth_lists(fields.BoxListFields.classes)
+ gt_weights_list = self.groundtruth_lists(fields.BoxListFields.weights)
+
+ assigner = self._target_assigner_dict[task_name]
+ (batch_indices, batch_offsets,
+ batch_weights) = assigner.assign_keypoints_offset_targets(
+ height=input_height,
+ width=input_width,
+ gt_keypoints_list=gt_keypoints_list,
+ gt_weights_list=gt_weights_list,
+ gt_classes_list=gt_classes_list)
+
+ # Keypoint offset loss.
+ loss = 0.0
+ for prediction in offset_predictions:
+ batch_size, out_height, out_width, channels = _get_shape(prediction, 4)
+ if channels > 2:
+ prediction = tf.reshape(
+ prediction, shape=[batch_size, out_height, out_width, -1, 2])
+ prediction = cn_assigner.get_batch_predictions_from_indices(
+ prediction, batch_indices)
+ # The dimensions passed are not as per the doc string but the loss
+ # still computes the correct value.
+ unweighted_loss = localization_loss_fn(
+ prediction,
+ batch_offsets,
+ weights=tf.expand_dims(tf.ones_like(batch_weights), -1))
+ # Apply the weights after the loss function to have full control over it.
+ loss += batch_weights * tf.reduce_sum(unweighted_loss, axis=1)
+
+ loss = tf.reduce_sum(loss) / (
+ float(len(offset_predictions)) *
+ tf.maximum(tf.reduce_sum(batch_weights), 1.0))
+ return loss
+
+ def _compute_kp_regression_loss(self, input_height, input_width, task_name,
+ regression_predictions, localization_loss_fn):
+ """Computes the keypoint regression loss of the keypoint estimation task.
+
+ Args:
+ input_height: An integer scalar tensor representing input image height.
+ input_width: An integer scalar tensor representing input image width.
+ task_name: A string representing the name of the keypoint task.
+ regression_predictions: A list of float tensors of shape [batch_size,
+ out_height, out_width, 2 * num_keypoints] representing the prediction
+ heads of the model for keypoint regression offset.
+ localization_loss_fn: An object_detection.core.losses.Loss object to
+ compute the loss for the keypoint regression offset predictions in
+ CenterNet.
+
+ Returns:
+ loss: A float scalar tensor representing the keypoint regression offset
+ loss normalized by number of total keypoints.
+ """
+ gt_boxes_list = self.groundtruth_lists(fields.BoxListFields.boxes)
+ gt_keypoints_list = self.groundtruth_lists(fields.BoxListFields.keypoints)
+ gt_classes_list = self.groundtruth_lists(fields.BoxListFields.classes)
+ gt_weights_list = self.groundtruth_lists(fields.BoxListFields.weights)
+ # keypoint regression offset loss.
+ assigner = self._target_assigner_dict[task_name]
+ (batch_indices, batch_regression_offsets,
+ batch_weights) = assigner.assign_joint_regression_targets(
+ height=input_height,
+ width=input_width,
+ gt_keypoints_list=gt_keypoints_list,
+ gt_classes_list=gt_classes_list,
+ gt_weights_list=gt_weights_list,
+ gt_boxes_list=gt_boxes_list)
+
+ loss = 0.0
+ for prediction in regression_predictions:
+ batch_size, out_height, out_width, _ = _get_shape(prediction, 4)
+ reshaped_prediction = tf.reshape(
+ prediction, shape=[batch_size, out_height, out_width, -1, 2])
+ reg_prediction = cn_assigner.get_batch_predictions_from_indices(
+ reshaped_prediction, batch_indices)
+ unweighted_loss = localization_loss_fn(
+ reg_prediction,
+ batch_regression_offsets,
+ weights=tf.expand_dims(tf.ones_like(batch_weights), -1))
+ # Apply the weights after the loss function to have full control over it.
+ loss += batch_weights * tf.reduce_sum(unweighted_loss, axis=1)
+
+ loss = tf.reduce_sum(loss) / (
+ float(len(regression_predictions)) *
+ tf.maximum(tf.reduce_sum(batch_weights), 1.0))
+ return loss
+
+ def _compute_segmentation_losses(self, prediction_dict, per_pixel_weights):
+ """Computes all the losses associated with segmentation.
+
+ Args:
+ prediction_dict: The dictionary returned from the predict() method.
+ per_pixel_weights: A float tensor of shape [batch_size,
+ out_height * out_width, 1] with 1s in locations where the spatial
+ coordinates fall within the height and width in true_image_shapes.
+
+ Returns:
+ A dictionary with segmentation losses.
+ """
+ segmentation_heatmap = prediction_dict[SEGMENTATION_HEATMAP]
+ mask_loss = self._compute_mask_loss(
+ segmentation_heatmap, per_pixel_weights)
+ losses = {
+ SEGMENTATION_HEATMAP: mask_loss
+ }
+ return losses
+
+ def _compute_mask_loss(self, segmentation_predictions,
+ per_pixel_weights):
+ """Computes the mask loss.
+
+ Args:
+ segmentation_predictions: A list of float32 tensors of shape [batch_size,
+ out_height, out_width, num_classes].
+ per_pixel_weights: A float tensor of shape [batch_size,
+ out_height * out_width, 1] with 1s in locations where the spatial
+ coordinates fall within the height and width in true_image_shapes.
+
+ Returns:
+ A float scalar tensor representing the mask loss.
+ """
+ gt_masks_list = self.groundtruth_lists(fields.BoxListFields.masks)
+ gt_classes_list = self.groundtruth_lists(fields.BoxListFields.classes)
+
+ # Convert the groundtruth to targets.
+ assigner = self._target_assigner_dict[SEGMENTATION_TASK]
+ heatmap_targets = assigner.assign_segmentation_targets(
+ gt_masks_list=gt_masks_list,
+ gt_classes_list=gt_classes_list)
+
+ flattened_heatmap_targets = _flatten_spatial_dimensions(heatmap_targets)
+
+ loss = 0.0
+ mask_loss_fn = self._mask_params.classification_loss
+ total_pixels_in_loss = tf.reduce_sum(per_pixel_weights)
+
+ # Loop through each feature output head.
+ for pred in segmentation_predictions:
+ pred = _flatten_spatial_dimensions(pred)
+ loss += mask_loss_fn(
+ pred, flattened_heatmap_targets, weights=per_pixel_weights)
+ # TODO(ronnyvotel): Consider other ways to normalize loss.
+ total_loss = tf.reduce_sum(loss) / (
+ float(len(segmentation_predictions)) * total_pixels_in_loss)
+ return total_loss
+
+ def preprocess(self, inputs):
+ outputs = shape_utils.resize_images_and_return_shapes(
+ inputs, self._image_resizer_fn)
+ resized_inputs, true_image_shapes = outputs
+
+ return (self._feature_extractor.preprocess(resized_inputs),
+ true_image_shapes)
+
+ def predict(self, preprocessed_inputs, _):
+ """Predicts CenterNet prediction tensors given an input batch.
+
+ Feature extractors are free to produce predictions from multiple feature
+ maps and therefore we return a dictionary mapping strings to lists.
+ E.g. the hourglass backbone produces two feature maps.
+
+ Args:
+ preprocessed_inputs: a [batch, height, width, channels] float32 tensor
+ representing a batch of images.
+
+ Returns:
+ prediction_dict: a dictionary holding predicted tensors with
+ 'preprocessed_inputs' - The input image after being resized and
+ preprocessed by the feature extractor.
+ 'object_center' - A list of size num_feature_outputs containing
+ float tensors of size [batch_size, output_height, output_width,
+ num_classes] representing the predicted object center heatmap logits.
+ 'box/scale' - [optional] A list of size num_feature_outputs holding
+ float tensors of size [batch_size, output_height, output_width, 2]
+ representing the predicted box height and width at each output
+ location. This field exists only when object detection task is
+ specified.
+ 'box/offset' - [optional] A list of size num_feature_outputs holding
+ float tensors of size [batch_size, output_height, output_width, 2]
+ representing the predicted y and x offsets at each output location.
+ '$TASK_NAME/keypoint_heatmap' - [optional] A list of size
+ num_feature_outputs holding float tensors of size [batch_size,
+ output_height, output_width, num_keypoints] representing the predicted
+ keypoint heatmap logits.
+ '$TASK_NAME/keypoint_offset' - [optional] A list of size
+ num_feature_outputs holding float tensors of size [batch_size,
+ output_height, output_width, 2] representing the predicted keypoint
+ offsets at each output location.
+ '$TASK_NAME/keypoint_regression' - [optional] A list of size
+ num_feature_outputs holding float tensors of size [batch_size,
+ output_height, output_width, 2 * num_keypoints] representing the
+ predicted keypoint regression at each output location.
+ 'segmentation/heatmap' - [optional] A list of size num_feature_outputs
+ holding float tensors of size [batch_size, output_height,
+ output_width, num_classes] representing the mask logits.
+ Note the $TASK_NAME is provided by the KeypointEstimationParams namedtuple
+ used to differentiate between different keypoint tasks.
+ """
+ features_list = self._feature_extractor(preprocessed_inputs)
+
+ predictions = {}
+ for head_name, heads in self._prediction_head_dict.items():
+ predictions[head_name] = [
+ head(feature) for (feature, head) in zip(features_list, heads)
+ ]
+ predictions['preprocessed_inputs'] = preprocessed_inputs
+
+ self._batched_prediction_tensor_names = predictions.keys()
+ return predictions
+
+ def loss(self, prediction_dict, true_image_shapes, scope=None):
+ """Computes scalar loss tensors with respect to provided groundtruth.
+
+ This function implements the various CenterNet losses.
+
+ Args:
+ prediction_dict: a dictionary holding predicted tensors returned by
+ "predict" function.
+ true_image_shapes: int32 tensor of shape [batch, 3] where each row is of
+ the form [height, width, channels] indicating the shapes of true images
+ in the resized images, as resized images can be padded with zeros.
+ scope: Optional scope name.
+
+ Returns:
+ A dictionary mapping the keys ['Loss/object_center', 'Loss/box/scale',
+ 'Loss/box/offset', 'Loss/$TASK_NAME/keypoint/heatmap',
+ 'Loss/$TASK_NAME/keypoint/offset',
+ 'Loss/$TASK_NAME/keypoint/regression', 'Loss/segmentation/heatmap'] to
+ scalar tensors corresponding to the losses for different tasks. Note the
+ $TASK_NAME is provided by the KeypointEstimationParams namedtuple used to
+ differentiate between different keypoint tasks.
+ """
+
+ _, input_height, input_width, _ = _get_shape(
+ prediction_dict['preprocessed_inputs'], 4)
+
+ output_height, output_width = (input_height // self._stride,
+ input_width // self._stride)
+
+ # TODO(vighneshb) Explore whether using floor here is safe.
+ output_true_image_shapes = tf.ceil(
+ tf.to_float(true_image_shapes) / self._stride)
+ valid_anchor_weights = get_valid_anchor_weights_in_flattened_image(
+ output_true_image_shapes, output_height, output_width)
+ valid_anchor_weights = tf.expand_dims(valid_anchor_weights, 2)
+
+ object_center_loss = self._compute_object_center_loss(
+ object_center_predictions=prediction_dict[OBJECT_CENTER],
+ input_height=input_height,
+ input_width=input_width,
+ per_pixel_weights=valid_anchor_weights)
+ losses = {
+ OBJECT_CENTER:
+ self._center_params.object_center_loss_weight * object_center_loss
+ }
+ if self._od_params is not None:
+ od_losses = self._compute_object_detection_losses(
+ input_height=input_height,
+ input_width=input_width,
+ prediction_dict=prediction_dict,
+ per_pixel_weights=valid_anchor_weights)
+ for key in od_losses:
+ od_losses[key] = od_losses[key] * self._od_params.task_loss_weight
+ losses.update(od_losses)
+
+ if self._kp_params_dict is not None:
+ for task_name, params in self._kp_params_dict.items():
+ kp_losses = self._compute_keypoint_estimation_losses(
+ task_name=task_name,
+ input_height=input_height,
+ input_width=input_width,
+ prediction_dict=prediction_dict,
+ per_pixel_weights=valid_anchor_weights)
+ for key in kp_losses:
+ kp_losses[key] = kp_losses[key] * params.task_loss_weight
+ losses.update(kp_losses)
+
+ if self._mask_params is not None:
+ seg_losses = self._compute_segmentation_losses(
+ prediction_dict=prediction_dict,
+ per_pixel_weights=valid_anchor_weights)
+ for key in seg_losses:
+ seg_losses[key] = seg_losses[key] * self._mask_params.task_loss_weight
+ losses.update(seg_losses)
+
+ # Prepend the LOSS_KEY_PREFIX to the keys in the dictionary such that the
+ # losses will be grouped together in Tensorboard.
+ return dict([('%s/%s' % (LOSS_KEY_PREFIX, key), val)
+ for key, val in losses.items()])
+
+ def postprocess(self, prediction_dict, true_image_shapes, **params):
+ """Produces boxes given a prediction dict returned by predict().
+
+ Although predict returns a list of tensors, only the last tensor in
+ each list is used for making box predictions.
+
+ Args:
+ prediction_dict: a dictionary holding predicted tensors from "predict"
+ function.
+ true_image_shapes: int32 tensor of shape [batch, 3] where each row is of
+ the form [height, width, channels] indicating the shapes of true images
+ in the resized images, as resized images can be padded with zeros.
+ **params: Currently ignored.
+
+ Returns:
+ detections: a dictionary containing the following fields
+ detection_boxes - A tensor of shape [batch, max_detections, 4]
+ holding the predicted boxes.
+ detection_scores: A tensor of shape [batch, max_detections] holding
+ the predicted score for each box.
+ detection_classes: An integer tensor of shape [batch, max_detections]
+ containing the detected class for each box.
+ num_detections: An integer tensor of shape [batch] containing the
+ number of detected boxes for each sample in the batch.
+ detection_keypoints: (Optional) A float tensor of shape [batch,
+ max_detections, num_keypoints, 2] with normalized keypoints. Any
+ invalid keypoints have their coordinates and scores set to 0.0.
+ detection_keypoint_scores: (Optional) A float tensor of shape [batch,
+ max_detections, num_keypoints] with scores for each keypoint.
+ detection_masks: (Optional) An int tensor of shape [batch,
+ max_detections, mask_height, mask_width] with binarized masks for each
+ detection.
+ """
+ object_center_prob = tf.nn.sigmoid(prediction_dict[OBJECT_CENTER][-1])
+ # Get x, y and channel indices corresponding to the top indices in the class
+ # center predictions.
+ detection_scores, y_indices, x_indices, channel_indices = (
+ top_k_feature_map_locations(
+ object_center_prob, max_pool_kernel_size=3,
+ k=self._center_params.max_box_predictions))
+
+ boxes_strided, classes, scores, num_detections = (
+ prediction_tensors_to_boxes(
+ detection_scores, y_indices, x_indices, channel_indices,
+ prediction_dict[BOX_SCALE][-1], prediction_dict[BOX_OFFSET][-1]))
+
+ boxes = convert_strided_predictions_to_normalized_boxes(
+ boxes_strided, self._stride, true_image_shapes)
+
+ postprocess_dict = {
+ fields.DetectionResultFields.detection_boxes: boxes,
+ fields.DetectionResultFields.detection_scores: scores,
+ fields.DetectionResultFields.detection_classes: classes,
+ fields.DetectionResultFields.num_detections: num_detections,
+ }
+
+ if self._kp_params_dict:
+ keypoints, keypoint_scores = self._postprocess_keypoints(
+ prediction_dict, classes, y_indices, x_indices,
+ boxes_strided, num_detections)
+ keypoints, keypoint_scores = (
+ convert_strided_predictions_to_normalized_keypoints(
+ keypoints, keypoint_scores, self._stride, true_image_shapes,
+ clip_out_of_frame_keypoints=True))
+ postprocess_dict.update({
+ fields.DetectionResultFields.detection_keypoints: keypoints,
+ fields.DetectionResultFields.detection_keypoint_scores:
+ keypoint_scores
+ })
+
+ if self._mask_params:
+ masks = tf.nn.sigmoid(prediction_dict[SEGMENTATION_HEATMAP][-1])
+ instance_masks = convert_strided_predictions_to_instance_masks(
+ boxes, classes, masks, self._stride, self._mask_params.mask_height,
+ self._mask_params.mask_width, true_image_shapes,
+ self._mask_params.score_threshold)
+ postprocess_dict.update({
+ fields.DetectionResultFields.detection_masks:
+ instance_masks
+ })
+ return postprocess_dict
+
+ def _postprocess_keypoints(self, prediction_dict, classes, y_indices,
+ x_indices, boxes, num_detections):
+ """Performs postprocessing on keypoint predictions.
+
+ Args:
+ prediction_dict: a dictionary holding predicted tensors, returned from the
+ predict() method. This dictionary should contain keypoint prediction
+ feature maps for each keypoint task.
+ classes: A [batch_size, max_detections] int tensor with class indices for
+ all detected objects.
+ y_indices: A [batch_size, max_detections] int tensor with y indices for
+ all object centers.
+ x_indices: A [batch_size, max_detections] int tensor with x indices for
+ all object centers.
+ boxes: A [batch_size, max_detections, 4] float32 tensor with bounding
+ boxes in (un-normalized) output space.
+ num_detections: A [batch_size] int tensor with the number of valid
+ detections for each image.
+
+ Returns:
+ A tuple of
+      keypoints: a [batch_size, max_detections, num_total_keypoints, 2] float32
+ tensor with keypoints in the output (strided) coordinate frame.
+ keypoint_scores: a [batch_size, max_detections, num_total_keypoints]
+ float32 tensor with keypoint scores.
+ """
+ total_num_keypoints = sum(len(kp_dict.keypoint_indices) for kp_dict
+ in self._kp_params_dict.values())
+ batch_size, max_detections, _ = _get_shape(boxes, 3)
+ kpt_coords_for_example_list = []
+ kpt_scores_for_example_list = []
+ for ex_ind in range(batch_size):
+ kpt_coords_for_class_list = []
+ kpt_scores_for_class_list = []
+ instance_inds_for_class_list = []
+ for task_name, kp_params in self._kp_params_dict.items():
+ keypoint_heatmap = prediction_dict[
+ get_keypoint_name(task_name, KEYPOINT_HEATMAP)][-1]
+ keypoint_offsets = prediction_dict[
+ get_keypoint_name(task_name, KEYPOINT_OFFSET)][-1]
+ keypoint_regression = prediction_dict[
+ get_keypoint_name(task_name, KEYPOINT_REGRESSION)][-1]
+ instance_inds = self._get_instance_indices(
+ classes, num_detections, ex_ind, kp_params.class_id)
+
+ def true_fn(
+ keypoint_heatmap, keypoint_offsets, keypoint_regression,
+ classes, y_indices, x_indices, boxes, instance_inds,
+ ex_ind, kp_params):
+ """Logics to execute when instance_inds is not an empty set."""
+ # Postprocess keypoints and scores for class and single image. Shapes
+ # are [1, num_instances_i, num_keypoints_i, 2] and
+ # [1, num_instances_i, num_keypoints_i], respectively. Note that
+          # num_instances_i and num_keypoints_i refer to the number of
+ # instances and keypoints for class i, respectively.
+ kpt_coords_for_class, kpt_scores_for_class = (
+ self._postprocess_keypoints_for_class_and_image(
+ keypoint_heatmap, keypoint_offsets, keypoint_regression,
+ classes, y_indices, x_indices, boxes, instance_inds,
+ ex_ind, kp_params))
+ # Expand keypoint dimension (with padding) so that coordinates and
+ # scores have shape [1, num_instances_i, num_total_keypoints, 2] and
+ # [1, num_instances_i, num_total_keypoints], respectively.
+ kpts_coords_for_class_padded, kpt_scores_for_class_padded = (
+ _pad_to_full_keypoint_dim(
+ kpt_coords_for_class, kpt_scores_for_class,
+ kp_params.keypoint_indices, total_num_keypoints))
+ return kpts_coords_for_class_padded, kpt_scores_for_class_padded
+
+ def false_fn():
+ """Logics to execute when the instance_inds is an empty set."""
+ return (tf.zeros([1, 0, total_num_keypoints, 2], dtype=tf.float32),
+ tf.zeros([1, 0, total_num_keypoints], dtype=tf.float32))
+
+ true_fn = functools.partial(
+ true_fn, keypoint_heatmap, keypoint_offsets, keypoint_regression,
+ classes, y_indices, x_indices, boxes, instance_inds, ex_ind,
+ kp_params)
+ results = tf.cond(tf.size(instance_inds) > 0, true_fn, false_fn)
+
+ kpt_coords_for_class_list.append(results[0])
+ kpt_scores_for_class_list.append(results[1])
+ instance_inds_for_class_list.append(instance_inds)
+
+ # Concatenate all keypoints across all classes (single example).
+ kpt_coords_for_example = tf.concat(kpt_coords_for_class_list, axis=1)
+ kpt_scores_for_example = tf.concat(kpt_scores_for_class_list, axis=1)
+ instance_inds_for_example = tf.concat(instance_inds_for_class_list,
+ axis=0)
+
+ if tf.size(instance_inds_for_example) > 0:
+ # Scatter into tensor where instances align with original detection
+ # instances. New shape of keypoint coordinates and scores are
+ # [1, max_detections, num_total_keypoints, 2] and
+ # [1, max_detections, num_total_keypoints], respectively.
+ kpt_coords_for_example_all_det, kpt_scores_for_example_all_det = (
+ _pad_to_full_instance_dim(
+ kpt_coords_for_example, kpt_scores_for_example,
+ instance_inds_for_example,
+ self._center_params.max_box_predictions))
+ else:
+ kpt_coords_for_example_all_det = tf.zeros(
+ [1, max_detections, total_num_keypoints, 2], dtype=tf.float32)
+ kpt_scores_for_example_all_det = tf.zeros(
+ [1, max_detections, total_num_keypoints], dtype=tf.float32)
+
+ kpt_coords_for_example_list.append(kpt_coords_for_example_all_det)
+ kpt_scores_for_example_list.append(kpt_scores_for_example_all_det)
+
+ # Concatenate all keypoints and scores from all examples in the batch.
+ # Shapes are [batch_size, max_detections, num_total_keypoints, 2] and
+ # [batch_size, max_detections, num_total_keypoints], respectively.
+ keypoints = tf.concat(kpt_coords_for_example_list, axis=0)
+ keypoint_scores = tf.concat(kpt_scores_for_example_list, axis=0)
+
+ return keypoints, keypoint_scores
+
+ def _get_instance_indices(self, classes, num_detections, batch_index,
+ class_id):
+ """Gets the instance indices that match the target class ID.
+
+ Args:
+ classes: A [batch_size, max_detections] int tensor with class indices for
+ all detected objects.
+ num_detections: A [batch_size] int tensor with the number of valid
+ detections for each image.
+ batch_index: An integer specifying the index for an example in the batch.
+      class_id: An integer representing the target class id.
+
+ Returns:
+ instance_inds: A [num_instances] int tensor where each element indicates
+ the instance location within the `classes` tensor. This is useful to
+ associate the refined keypoints with the original detections (i.e.
+ boxes)
+ """
+ classes = classes[batch_index:batch_index+1, ...]
+ _, max_detections = shape_utils.combined_static_and_dynamic_shape(
+ classes)
+ # Get the detection indices corresponding to the target class.
+ valid_detections_with_kpt_class = tf.math.logical_and(
+ tf.range(max_detections) < num_detections[batch_index],
+ classes[0] == class_id)
+ instance_inds = tf.where(valid_detections_with_kpt_class)[:, 0]
+ return instance_inds
+
+ def _postprocess_keypoints_for_class_and_image(
+ self, keypoint_heatmap, keypoint_offsets, keypoint_regression, classes,
+ y_indices, x_indices, boxes, indices_with_kpt_class, batch_index,
+ kp_params):
+ """Postprocess keypoints for a single image and class.
+
+ This function performs the following postprocessing operations on a single
+ image and single keypoint class:
+ - Converts keypoints scores to range [0, 1] with sigmoid.
+ - Determines the detections that correspond to the specified keypoint class.
+ - Gathers the regressed keypoints at the detection (i.e. box) centers.
+ - Gathers keypoint candidates from the keypoint heatmaps.
+ - Snaps regressed keypoints to nearby keypoint candidates.
+
+ Args:
+ keypoint_heatmap: A [batch_size, height, width, num_keypoints] float32
+ tensor with keypoint heatmaps.
+ keypoint_offsets: A [batch_size, height, width, 2] float32 tensor with
+ local offsets to keypoint centers.
+ keypoint_regression: A [batch_size, height, width, 2 * num_keypoints]
+ float32 tensor with regressed offsets to all keypoints.
+ classes: A [batch_size, max_detections] int tensor with class indices for
+ all detected objects.
+ y_indices: A [batch_size, max_detections] int tensor with y indices for
+ all object centers.
+ x_indices: A [batch_size, max_detections] int tensor with x indices for
+ all object centers.
+ boxes: A [batch_size, max_detections, 4] float32 tensor with detected
+ boxes in the output (strided) frame.
+ indices_with_kpt_class: A [num_instances] int tensor where each element
+ indicates the instance location within the `classes` tensor. This is
+ useful to associate the refined keypoints with the original detections
+ (i.e. boxes)
+ batch_index: An integer specifying the index for an example in the batch.
+ kp_params: A `KeypointEstimationParams` object with parameters for a
+ single keypoint class.
+
+ Returns:
+ A tuple of
+ refined_keypoints: A [1, num_instances, num_keypoints, 2] float32 tensor
+ with refined keypoints for a single class in a single image, expressed
+ in the output (strided) coordinate frame. Note that `num_instances` is a
+ dynamic dimension, and corresponds to the number of valid detections
+ for the specific class.
+ refined_scores: A [1, num_instances, num_keypoints] float32 tensor with
+ keypoint scores.
+ """
+ keypoint_indices = kp_params.keypoint_indices
+ num_keypoints = len(keypoint_indices)
+
+ keypoint_heatmap = tf.nn.sigmoid(
+ keypoint_heatmap[batch_index:batch_index+1, ...])
+ keypoint_offsets = keypoint_offsets[batch_index:batch_index+1, ...]
+ keypoint_regression = keypoint_regression[batch_index:batch_index+1, ...]
+ y_indices = y_indices[batch_index:batch_index+1, ...]
+ x_indices = x_indices[batch_index:batch_index+1, ...]
+
+ # Gather the feature map locations corresponding to the object class.
+ y_indices_for_kpt_class = tf.gather(y_indices, indices_with_kpt_class,
+ axis=1)
+ x_indices_for_kpt_class = tf.gather(x_indices, indices_with_kpt_class,
+ axis=1)
+ boxes_for_kpt_class = tf.gather(boxes, indices_with_kpt_class, axis=1)
+
+ # Gather the regressed keypoints. Final tensor has shape
+ # [1, num_instances, num_keypoints, 2].
+ regressed_keypoints_for_objects = regressed_keypoints_at_object_centers(
+ keypoint_regression, y_indices_for_kpt_class, x_indices_for_kpt_class)
+ regressed_keypoints_for_objects = tf.reshape(
+ regressed_keypoints_for_objects, [1, -1, num_keypoints, 2])
+
+ # Get the candidate keypoints and scores.
+ # The shape of keypoint_candidates and keypoint_scores is:
+ # [1, num_candidates_per_keypoint, num_keypoints, 2] and
+ # [1, num_candidates_per_keypoint, num_keypoints], respectively.
+ keypoint_candidates, keypoint_scores, num_keypoint_candidates = (
+ prediction_tensors_to_keypoint_candidates(
+ keypoint_heatmap, keypoint_offsets,
+ keypoint_score_threshold=(
+ kp_params.keypoint_candidate_score_threshold),
+ max_pool_kernel_size=kp_params.peak_max_pool_kernel_size,
+ max_candidates=kp_params.num_candidates_per_keypoint))
+
+ # Get the refined keypoints and scores, of shape
+ # [1, num_instances, num_keypoints, 2] and
+ # [1, num_instances, num_keypoints], respectively.
+ refined_keypoints, refined_scores = refine_keypoints(
+ regressed_keypoints_for_objects, keypoint_candidates, keypoint_scores,
+ num_keypoint_candidates, bboxes=boxes_for_kpt_class,
+ unmatched_keypoint_score=kp_params.unmatched_keypoint_score,
+ box_scale=kp_params.box_scale,
+ candidate_search_scale=kp_params.candidate_search_scale,
+ candidate_ranking_mode=kp_params.candidate_ranking_mode)
+
+ return refined_keypoints, refined_scores
+
+ def regularization_losses(self):
+ return []
+
+ def restore_map(self,
+ fine_tune_checkpoint_type='detection',
+ load_all_detection_checkpoint_vars=False):
+ raise RuntimeError('CenterNetMetaArch not supported under TF1.x.')
+
+ def restore_from_objects(self, fine_tune_checkpoint_type='detection'):
+ """Returns a map of Trackable objects to load from a foreign checkpoint.
+
+    Returns a dictionary of TensorFlow 2 Trackable objects (e.g. tf.Module
+ or Checkpoint). This enables the model to initialize based on weights from
+ another task. For example, the feature extractor variables from a
+ classification model can be used to bootstrap training of an object
+ detector. When loading from an object detection model, the checkpoint model
+ should have the same parameters as this detection model with exception of
+ the num_classes parameter.
+
+    Note that this function is intended to be used to restore Keras-based
+    models when running TensorFlow 2, whereas restore_map (not implemented
+    in CenterNet) is intended to be used to restore Slim-based models when
+    running TensorFlow 1.x.
+
+ TODO(jonathanhuang): Make this function consistent with other
+ meta-architectures.
+
+ Args:
+ fine_tune_checkpoint_type: whether to restore from a full detection
+ checkpoint (with compatible variable names) or to restore from a
+ classification checkpoint for initialization prior to training.
+ Valid values: `detection`, `classification`. Default 'detection'.
+
+ Returns:
+ A dict mapping keys to Trackable objects (tf.Module or Checkpoint).
+ """
+
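+    # Sketch of one way to consume the returned map (the checkpoint path is
+    # hypothetical; the training loop is expected to perform the equivalent):
+    #   restore_map = model.restore_from_objects('classification')
+    #   ckpt = tf.train.Checkpoint(**restore_map)
+    #   ckpt.restore('/path/to/classification_ckpt').expect_partial()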
+ if fine_tune_checkpoint_type == 'classification':
+ return {'feature_extractor': self._feature_extractor.get_base_model()}
+
+ if fine_tune_checkpoint_type == 'detection':
+ return {'feature_extractor': self._feature_extractor.get_model()}
+
+ else:
+      raise ValueError('Unsupported fine_tune_checkpoint_type: {}'.format(
+          fine_tune_checkpoint_type))
+
+ def updates(self):
+ raise RuntimeError('This model is intended to be used with model_lib_v2 '
+ 'which does not support updates()')
diff --git a/research/object_detection/meta_architectures/center_net_meta_arch_tf2_test.py b/research/object_detection/meta_architectures/center_net_meta_arch_tf2_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..298081b7bdf27ab4023b085adb10ad4cacd96921
--- /dev/null
+++ b/research/object_detection/meta_architectures/center_net_meta_arch_tf2_test.py
@@ -0,0 +1,1681 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Tests for the CenterNet Meta architecture code."""
+
+from __future__ import division
+
+import functools
+import unittest
+from absl.testing import parameterized
+import numpy as np
+import tensorflow.compat.v1 as tf
+
+from object_detection.core import losses
+from object_detection.core import preprocessor
+from object_detection.core import standard_fields as fields
+from object_detection.core import target_assigner as cn_assigner
+from object_detection.meta_architectures import center_net_meta_arch as cnma
+from object_detection.models import center_net_resnet_feature_extractor
+from object_detection.utils import test_case
+from object_detection.utils import tf_version
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class CenterNetMetaArchPredictionHeadTest(test_case.TestCase):
+ """Test CenterNet meta architecture prediction head."""
+
+ def test_prediction_head(self):
+ head = cnma.make_prediction_net(num_out_channels=7)
+ output = head(np.zeros((4, 128, 128, 8)))
+
+ self.assertEqual((4, 128, 128, 7), output.shape)
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class CenterNetMetaArchHelpersTest(test_case.TestCase, parameterized.TestCase):
+ """Test for CenterNet meta architecture related functions."""
+
+ def test_row_col_indices_from_flattened_indices(self):
+ """Tests that the computation of row, col, channel indices is correct."""
+
+    r_grid, c_grid, ch_grid = (np.zeros((5, 4, 3), dtype=np.int32),
+                               np.zeros((5, 4, 3), dtype=np.int32),
+                               np.zeros((5, 4, 3), dtype=np.int32))
+
+ r_grid[..., 0] = r_grid[..., 1] = r_grid[..., 2] = np.array(
+ [[0, 0, 0, 0],
+ [1, 1, 1, 1],
+ [2, 2, 2, 2],
+ [3, 3, 3, 3],
+ [4, 4, 4, 4]]
+ )
+
+ c_grid[..., 0] = c_grid[..., 1] = c_grid[..., 2] = np.array(
+ [[0, 1, 2, 3],
+ [0, 1, 2, 3],
+ [0, 1, 2, 3],
+ [0, 1, 2, 3],
+ [0, 1, 2, 3]]
+ )
+
+ for i in range(3):
+ ch_grid[..., i] = i
+
+ indices = np.arange(60)
+ ri, ci, chi = cnma.row_col_channel_indices_from_flattened_indices(
+ indices, 4, 3)
+
+ np.testing.assert_array_equal(ri, r_grid.flatten())
+ np.testing.assert_array_equal(ci, c_grid.flatten())
+ np.testing.assert_array_equal(chi, ch_grid.flatten())
+
+ def test_flattened_indices_from_row_col_indices(self):
+
+ r = np.array(
+ [[0, 0, 0, 0],
+ [1, 1, 1, 1],
+ [2, 2, 2, 2]]
+ )
+
+ c = np.array(
+ [[0, 1, 2, 3],
+ [0, 1, 2, 3],
+ [0, 1, 2, 3]]
+ )
+
+ idx = cnma.flattened_indices_from_row_col_indices(r, c, 4)
+ np.testing.assert_array_equal(np.arange(12), idx.flatten())
+
+ def test_get_valid_anchor_weights_in_flattened_image(self):
+ """Tests that the anchor weights are valid upon flattening out."""
+
+    valid_weights = np.zeros((2, 5, 5), dtype=np.float32)
+
+ valid_weights[0, :3, :4] = 1.0
+ valid_weights[1, :2, :2] = 1.0
+
+ def graph_fn():
+ true_image_shapes = tf.constant([[3, 4], [2, 2]])
+ w = cnma.get_valid_anchor_weights_in_flattened_image(
+ true_image_shapes, 5, 5)
+ return w
+
+ w = self.execute(graph_fn, [])
+ np.testing.assert_allclose(w, valid_weights.reshape(2, -1))
+ self.assertEqual((2, 25), w.shape)
+
+ def test_convert_strided_predictions_to_normalized_boxes(self):
+ """Tests that boxes have correct coordinates in normalized input space."""
+
+ def graph_fn():
+ boxes = np.zeros((2, 3, 4), dtype=np.float32)
+
+ boxes[0] = [[10, 20, 30, 40], [20, 30, 50, 100], [50, 60, 100, 180]]
+ boxes[1] = [[-5, -5, 5, 5], [45, 60, 110, 120], [150, 150, 200, 250]]
+
+ true_image_shapes = tf.constant([[100, 90, 3], [150, 150, 3]])
+
+ clipped_boxes = (
+ cnma.convert_strided_predictions_to_normalized_boxes(
+ boxes, 2, true_image_shapes))
+ return clipped_boxes
+
+ clipped_boxes = self.execute(graph_fn, [])
+
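+    # Strided coordinates are scaled by the stride (2 here), normalized by the
+    # true image shape and clipped to [0, 1]; e.g. for the first box of
+    # example 0, ymin = 10 * 2 / 100 = 0.2 and xmin = 20 * 2 / 90 = 4/9.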
+ expected_boxes = np.zeros((2, 3, 4), dtype=np.float32)
+ expected_boxes[0] = [[0.2, 4./9, 0.6, 8./9], [0.4, 2./3, 1, 1],
+ [1, 1, 1, 1]]
+ expected_boxes[1] = [[0., 0, 1./15, 1./15], [3./5, 4./5, 1, 1],
+ [1, 1, 1, 1]]
+
+ np.testing.assert_allclose(expected_boxes, clipped_boxes)
+
+ @parameterized.parameters(
+ {'clip_to_window': True},
+ {'clip_to_window': False}
+ )
+ def test_convert_strided_predictions_to_normalized_keypoints(
+ self, clip_to_window):
+ """Tests that keypoints have correct coordinates in normalized coords."""
+
+ keypoint_coords_np = np.array(
+ [
+ # Example 0.
+ [
+ [[-10., 8.], [60., 22.], [60., 120.]],
+ [[20., 20.], [0., 0.], [0., 0.]],
+ ],
+ # Example 1.
+ [
+ [[40., 50.], [20., 160.], [200., 150.]],
+ [[10., 0.], [40., 10.], [0., 0.]],
+ ],
+ ], dtype=np.float32)
+ keypoint_scores_np = np.array(
+ [
+ # Example 0.
+ [
+ [1.0, 0.9, 0.2],
+ [0.7, 0.0, 0.0],
+ ],
+ # Example 1.
+ [
+ [1.0, 1.0, 0.2],
+ [0.7, 0.6, 0.0],
+ ],
+ ], dtype=np.float32)
+
+ def graph_fn():
+ keypoint_coords = tf.constant(keypoint_coords_np, dtype=tf.float32)
+ keypoint_scores = tf.constant(keypoint_scores_np, dtype=tf.float32)
+ true_image_shapes = tf.constant([[320, 400, 3], [640, 640, 3]])
+ stride = 4
+
+ keypoint_coords_out, keypoint_scores_out = (
+ cnma.convert_strided_predictions_to_normalized_keypoints(
+ keypoint_coords, keypoint_scores, stride, true_image_shapes,
+ clip_to_window))
+ return keypoint_coords_out, keypoint_scores_out
+
+ keypoint_coords_out, keypoint_scores_out = self.execute(graph_fn, [])
+
+ if clip_to_window:
+ expected_keypoint_coords_np = np.array(
+ [
+ # Example 0.
+ [
+ [[0.0, 0.08], [0.75, 0.22], [0.75, 1.0]],
+ [[0.25, 0.2], [0., 0.], [0.0, 0.0]],
+ ],
+ # Example 1.
+ [
+ [[0.25, 0.3125], [0.125, 1.0], [1.0, 0.9375]],
+ [[0.0625, 0.], [0.25, 0.0625], [0., 0.]],
+ ],
+ ], dtype=np.float32)
+ expected_keypoint_scores_np = np.array(
+ [
+ # Example 0.
+ [
+ [0.0, 0.9, 0.0],
+ [0.7, 0.0, 0.0],
+ ],
+ # Example 1.
+ [
+ [1.0, 1.0, 0.0],
+ [0.7, 0.6, 0.0],
+ ],
+ ], dtype=np.float32)
+ else:
+ expected_keypoint_coords_np = np.array(
+ [
+ # Example 0.
+ [
+ [[-0.125, 0.08], [0.75, 0.22], [0.75, 1.2]],
+ [[0.25, 0.2], [0., 0.], [0., 0.]],
+ ],
+ # Example 1.
+ [
+ [[0.25, 0.3125], [0.125, 1.0], [1.25, 0.9375]],
+ [[0.0625, 0.], [0.25, 0.0625], [0., 0.]],
+ ],
+ ], dtype=np.float32)
+ expected_keypoint_scores_np = np.array(
+ [
+ # Example 0.
+ [
+ [1.0, 0.9, 0.2],
+ [0.7, 0.0, 0.0],
+ ],
+ # Example 1.
+ [
+ [1.0, 1.0, 0.2],
+ [0.7, 0.6, 0.0],
+ ],
+ ], dtype=np.float32)
+ np.testing.assert_allclose(expected_keypoint_coords_np, keypoint_coords_out)
+ np.testing.assert_allclose(expected_keypoint_scores_np, keypoint_scores_out)
+
+ def test_convert_strided_predictions_to_instance_masks(self):
+
+ def graph_fn():
+ boxes = tf.constant(
+ [
+ [[0.5, 0.5, 1.0, 1.0],
+ [0.0, 0.5, 0.5, 1.0],
+ [0.0, 0.0, 0.0, 0.0]],
+ ], tf.float32)
+ classes = tf.constant(
+ [
+ [0, 1, 0],
+ ], tf.int32)
+ masks_np = np.zeros((1, 4, 4, 2), dtype=np.float32)
+ masks_np[0, :, 2:, 0] = 1 # Class 0.
+ masks_np[0, :, :3, 1] = 1 # Class 1.
+ masks = tf.constant(masks_np)
+ true_image_shapes = tf.constant([[6, 8, 3]])
+ instance_masks = cnma.convert_strided_predictions_to_instance_masks(
+ boxes, classes, masks, stride=2, mask_height=2, mask_width=2,
+ true_image_shapes=true_image_shapes)
+ return instance_masks
+
+ instance_masks = self.execute_cpu(graph_fn, [])
+
+ expected_instance_masks = np.array(
+ [
+ [
+ # Mask 0 (class 0).
+ [[1, 1],
+ [1, 1]],
+ # Mask 1 (class 1).
+ [[1, 0],
+ [1, 0]],
+ # Mask 2 (class 0).
+ [[0, 0],
+ [0, 0]],
+ ]
+ ])
+ np.testing.assert_array_equal(expected_instance_masks, instance_masks)
+
+ def test_top_k_feature_map_locations(self):
+ feature_map_np = np.zeros((2, 3, 3, 2), dtype=np.float32)
+ feature_map_np[0, 2, 0, 1] = 1.0
+    feature_map_np[0, 2, 1, 1] = 0.9  # Gets filtered due to max pool.
+ feature_map_np[0, 0, 1, 0] = 0.7
+ feature_map_np[0, 2, 2, 0] = 0.5
+ feature_map_np[0, 2, 2, 1] = -0.3
+ feature_map_np[1, 2, 1, 1] = 0.7
+ feature_map_np[1, 1, 0, 0] = 0.4
+ feature_map_np[1, 1, 2, 0] = 0.1
+
+ def graph_fn():
+ feature_map = tf.constant(feature_map_np)
+ scores, y_inds, x_inds, channel_inds = (
+ cnma.top_k_feature_map_locations(
+ feature_map, max_pool_kernel_size=3, k=3))
+ return scores, y_inds, x_inds, channel_inds
+
+ scores, y_inds, x_inds, channel_inds = self.execute(graph_fn, [])
+
+ np.testing.assert_allclose([1.0, 0.7, 0.5], scores[0])
+ np.testing.assert_array_equal([2, 0, 2], y_inds[0])
+ np.testing.assert_array_equal([0, 1, 2], x_inds[0])
+ np.testing.assert_array_equal([1, 0, 0], channel_inds[0])
+
+ np.testing.assert_allclose([0.7, 0.4, 0.1], scores[1])
+ np.testing.assert_array_equal([2, 1, 1], y_inds[1])
+ np.testing.assert_array_equal([1, 0, 2], x_inds[1])
+ np.testing.assert_array_equal([1, 0, 0], channel_inds[1])
+
+ def test_top_k_feature_map_locations_no_pooling(self):
+ feature_map_np = np.zeros((2, 3, 3, 2), dtype=np.float32)
+ feature_map_np[0, 2, 0, 1] = 1.0
+ feature_map_np[0, 2, 1, 1] = 0.9
+ feature_map_np[0, 0, 1, 0] = 0.7
+ feature_map_np[0, 2, 2, 0] = 0.5
+ feature_map_np[0, 2, 2, 1] = -0.3
+ feature_map_np[1, 2, 1, 1] = 0.7
+ feature_map_np[1, 1, 0, 0] = 0.4
+ feature_map_np[1, 1, 2, 0] = 0.1
+
+ def graph_fn():
+ feature_map = tf.constant(feature_map_np)
+ scores, y_inds, x_inds, channel_inds = (
+ cnma.top_k_feature_map_locations(
+ feature_map, max_pool_kernel_size=1, k=3))
+ return scores, y_inds, x_inds, channel_inds
+
+ scores, y_inds, x_inds, channel_inds = self.execute(graph_fn, [])
+
+ np.testing.assert_allclose([1.0, 0.9, 0.7], scores[0])
+ np.testing.assert_array_equal([2, 2, 0], y_inds[0])
+ np.testing.assert_array_equal([0, 1, 1], x_inds[0])
+ np.testing.assert_array_equal([1, 1, 0], channel_inds[0])
+
+ np.testing.assert_allclose([0.7, 0.4, 0.1], scores[1])
+ np.testing.assert_array_equal([2, 1, 1], y_inds[1])
+ np.testing.assert_array_equal([1, 0, 2], x_inds[1])
+ np.testing.assert_array_equal([1, 0, 0], channel_inds[1])
+
+ def test_top_k_feature_map_locations_per_channel(self):
+ feature_map_np = np.zeros((2, 3, 3, 2), dtype=np.float32)
+ feature_map_np[0, 2, 0, 0] = 1.0 # Selected.
+    feature_map_np[0, 2, 1, 0] = 0.9  # Gets filtered due to max pool.
+ feature_map_np[0, 0, 1, 0] = 0.7 # Selected.
+ feature_map_np[0, 2, 2, 1] = 0.5 # Selected.
+ feature_map_np[0, 0, 0, 1] = 0.3 # Selected.
+ feature_map_np[1, 2, 1, 0] = 0.7 # Selected.
+    feature_map_np[1, 1, 0, 0] = 0.4  # Gets filtered due to max pool.
+    feature_map_np[1, 1, 2, 0] = 0.3  # Gets filtered due to max pool.
+ feature_map_np[1, 1, 0, 1] = 0.8 # Selected.
+ feature_map_np[1, 1, 2, 1] = 0.3 # Selected.
+
+ def graph_fn():
+ feature_map = tf.constant(feature_map_np)
+ scores, y_inds, x_inds, channel_inds = (
+ cnma.top_k_feature_map_locations(
+ feature_map, max_pool_kernel_size=3, k=2, per_channel=True))
+ return scores, y_inds, x_inds, channel_inds
+
+ scores, y_inds, x_inds, channel_inds = self.execute(graph_fn, [])
+
+ np.testing.assert_allclose([1.0, 0.7, 0.5, 0.3], scores[0])
+ np.testing.assert_array_equal([2, 0, 2, 0], y_inds[0])
+ np.testing.assert_array_equal([0, 1, 2, 0], x_inds[0])
+ np.testing.assert_array_equal([0, 0, 1, 1], channel_inds[0])
+
+ np.testing.assert_allclose([0.7, 0.0, 0.8, 0.3], scores[1])
+ np.testing.assert_array_equal([2, 0, 1, 1], y_inds[1])
+ np.testing.assert_array_equal([1, 0, 0, 2], x_inds[1])
+ np.testing.assert_array_equal([0, 0, 1, 1], channel_inds[1])
+
+ def test_box_prediction(self):
+
+ class_pred = np.zeros((3, 128, 128, 5), dtype=np.float32)
+ hw_pred = np.zeros((3, 128, 128, 2), dtype=np.float32)
+ offset_pred = np.zeros((3, 128, 128, 2), dtype=np.float32)
+
+ # Sample 1, 2 boxes
+ class_pred[0, 10, 20] = [0.3, .7, 0.0, 0.0, 0.0]
+ hw_pred[0, 10, 20] = [40, 60]
+ offset_pred[0, 10, 20] = [1, 2]
+
+ class_pred[0, 50, 60] = [0.55, 0.0, 0.0, 0.0, 0.45]
+ hw_pred[0, 50, 60] = [50, 50]
+ offset_pred[0, 50, 60] = [0, 0]
+
+ # Sample 2, 2 boxes (at same location)
+ class_pred[1, 100, 100] = [0.0, 0.1, 0.9, 0.0, 0.0]
+ hw_pred[1, 100, 100] = [10, 10]
+ offset_pred[1, 100, 100] = [1, 3]
+
+ # Sample 3, 3 boxes
+ class_pred[2, 60, 90] = [0.0, 0.0, 0.0, 0.2, 0.8]
+ hw_pred[2, 60, 90] = [40, 30]
+ offset_pred[2, 60, 90] = [0, 0]
+
+ class_pred[2, 65, 95] = [0.0, 0.7, 0.3, 0.0, 0.0]
+ hw_pred[2, 65, 95] = [20, 20]
+ offset_pred[2, 65, 95] = [1, 2]
+
+ class_pred[2, 75, 85] = [1.0, 0.0, 0.0, 0.0, 0.0]
+ hw_pred[2, 75, 85] = [21, 25]
+ offset_pred[2, 75, 85] = [5, 2]
+
+ def graph_fn():
+ class_pred_tensor = tf.constant(class_pred)
+ hw_pred_tensor = tf.constant(hw_pred)
+ offset_pred_tensor = tf.constant(offset_pred)
+
+ detection_scores, y_indices, x_indices, channel_indices = (
+ cnma.top_k_feature_map_locations(
+ class_pred_tensor, max_pool_kernel_size=3, k=2))
+
+ boxes, classes, scores, num_dets = cnma.prediction_tensors_to_boxes(
+ detection_scores, y_indices, x_indices, channel_indices,
+ hw_pred_tensor, offset_pred_tensor)
+ return boxes, classes, scores, num_dets
+
+ boxes, classes, scores, num_dets = self.execute(graph_fn, [])
+
+ np.testing.assert_array_equal(num_dets, [2, 2, 2])
+
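+    # For reference, boxes are [yc - h/2, xc - w/2, yc + h/2, xc + w/2] with
+    # the center at the peak location plus the predicted offset; e.g. peak
+    # (10, 20) with offset (1, 2) and size (40, 60) yields [-9, -8, 31, 52].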
+ np.testing.assert_allclose(
+ [[-9, -8, 31, 52], [25, 35, 75, 85]], boxes[0])
+ np.testing.assert_allclose(
+ [[96, 98, 106, 108], [96, 98, 106, 108]], boxes[1])
+ np.testing.assert_allclose(
+ [[69.5, 74.5, 90.5, 99.5], [40, 75, 80, 105]], boxes[2])
+
+ np.testing.assert_array_equal(classes[0], [1, 0])
+ np.testing.assert_array_equal(classes[1], [2, 1])
+ np.testing.assert_array_equal(classes[2], [0, 4])
+
+ np.testing.assert_allclose(scores[0], [.7, .55])
+ np.testing.assert_allclose(scores[1][:1], [.9])
+ np.testing.assert_allclose(scores[2], [1., .8])
+
+ def test_keypoint_candidate_prediction(self):
+ keypoint_heatmap_np = np.zeros((2, 3, 3, 2), dtype=np.float32)
+ keypoint_heatmap_np[0, 0, 0, 0] = 1.0
+ keypoint_heatmap_np[0, 2, 1, 0] = 0.7
+ keypoint_heatmap_np[0, 1, 1, 0] = 0.6
+ keypoint_heatmap_np[0, 0, 2, 1] = 0.7
+ keypoint_heatmap_np[0, 1, 1, 1] = 0.3 # Filtered by low score.
+ keypoint_heatmap_np[0, 2, 2, 1] = 0.2
+ keypoint_heatmap_np[1, 1, 0, 0] = 0.6
+ keypoint_heatmap_np[1, 2, 1, 0] = 0.5
+ keypoint_heatmap_np[1, 0, 0, 0] = 0.4
+ keypoint_heatmap_np[1, 0, 0, 1] = 1.0
+ keypoint_heatmap_np[1, 0, 1, 1] = 0.9
+ keypoint_heatmap_np[1, 2, 0, 1] = 0.8
+
+ keypoint_heatmap_offsets_np = np.zeros((2, 3, 3, 2), dtype=np.float32)
+ keypoint_heatmap_offsets_np[0, 0, 0] = [0.5, 0.25]
+ keypoint_heatmap_offsets_np[0, 2, 1] = [-0.25, 0.5]
+ keypoint_heatmap_offsets_np[0, 1, 1] = [0.0, 0.0]
+ keypoint_heatmap_offsets_np[0, 0, 2] = [1.0, 0.0]
+ keypoint_heatmap_offsets_np[0, 2, 2] = [1.0, 1.0]
+ keypoint_heatmap_offsets_np[1, 1, 0] = [0.25, 0.5]
+ keypoint_heatmap_offsets_np[1, 2, 1] = [0.5, 0.0]
+ keypoint_heatmap_offsets_np[1, 0, 0] = [0.0, -0.5]
+ keypoint_heatmap_offsets_np[1, 0, 1] = [0.5, -0.5]
+ keypoint_heatmap_offsets_np[1, 2, 0] = [-1.0, -0.5]
+
+ def graph_fn():
+ keypoint_heatmap = tf.constant(keypoint_heatmap_np, dtype=tf.float32)
+ keypoint_heatmap_offsets = tf.constant(
+ keypoint_heatmap_offsets_np, dtype=tf.float32)
+
+ keypoint_cands, keypoint_scores, num_keypoint_candidates = (
+ cnma.prediction_tensors_to_keypoint_candidates(
+ keypoint_heatmap,
+ keypoint_heatmap_offsets,
+ keypoint_score_threshold=0.5,
+ max_pool_kernel_size=1,
+ max_candidates=2))
+ return keypoint_cands, keypoint_scores, num_keypoint_candidates
+
+ (keypoint_cands, keypoint_scores,
+ num_keypoint_candidates) = self.execute(graph_fn, [])
+
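+    # Candidate coordinates are the heatmap peak locations plus the predicted
+    # local offsets; e.g. the keypoint-0 peak at (2, 1) with offset
+    # (-0.25, 0.5) produces the candidate (1.75, 1.5).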
+ expected_keypoint_candidates = [
+ [ # Example 0.
+ [[0.5, 0.25], [1.0, 2.0]], # Keypoint 1.
+ [[1.75, 1.5], [1.0, 1.0]], # Keypoint 2.
+ ],
+ [ # Example 1.
+ [[1.25, 0.5], [0.0, -0.5]], # Keypoint 1.
+ [[2.5, 1.0], [0.5, 0.5]], # Keypoint 2.
+ ],
+ ]
+ expected_keypoint_scores = [
+ [ # Example 0.
+ [1.0, 0.7], # Keypoint 1.
+ [0.7, 0.3], # Keypoint 2.
+ ],
+ [ # Example 1.
+ [0.6, 1.0], # Keypoint 1.
+ [0.5, 0.9], # Keypoint 2.
+ ],
+ ]
+ expected_num_keypoint_candidates = [
+ [2, 1],
+ [2, 2]
+ ]
+ np.testing.assert_allclose(expected_keypoint_candidates, keypoint_cands)
+ np.testing.assert_allclose(expected_keypoint_scores, keypoint_scores)
+ np.testing.assert_array_equal(expected_num_keypoint_candidates,
+ num_keypoint_candidates)
+
+ def test_keypoint_candidate_prediction_per_keypoints(self):
+ keypoint_heatmap_np = np.zeros((2, 3, 3, 2), dtype=np.float32)
+ keypoint_heatmap_np[0, 0, 0, 0] = 1.0
+ keypoint_heatmap_np[0, 2, 1, 0] = 0.7
+ keypoint_heatmap_np[0, 1, 1, 0] = 0.6
+ keypoint_heatmap_np[0, 0, 2, 1] = 0.7
+ keypoint_heatmap_np[0, 1, 1, 1] = 0.3 # Filtered by low score.
+ keypoint_heatmap_np[0, 2, 2, 1] = 0.2
+ keypoint_heatmap_np[1, 1, 0, 0] = 0.6
+ keypoint_heatmap_np[1, 2, 1, 0] = 0.5
+ keypoint_heatmap_np[1, 0, 0, 0] = 0.4
+ keypoint_heatmap_np[1, 0, 0, 1] = 1.0
+ keypoint_heatmap_np[1, 0, 1, 1] = 0.9
+ keypoint_heatmap_np[1, 2, 0, 1] = 0.8
+
+ keypoint_heatmap_offsets_np = np.zeros((2, 3, 3, 4), dtype=np.float32)
+ keypoint_heatmap_offsets_np[0, 0, 0] = [0.5, 0.25, 0.0, 0.0]
+ keypoint_heatmap_offsets_np[0, 2, 1] = [-0.25, 0.5, 0.0, 0.0]
+ keypoint_heatmap_offsets_np[0, 1, 1] = [0.0, 0.0, 0.0, 0.0]
+ keypoint_heatmap_offsets_np[0, 0, 2] = [0.0, 0.0, 1.0, 0.0]
+ keypoint_heatmap_offsets_np[0, 2, 2] = [0.0, 0.0, 1.0, 1.0]
+ keypoint_heatmap_offsets_np[1, 1, 0] = [0.25, 0.5, 0.0, 0.0]
+ keypoint_heatmap_offsets_np[1, 2, 1] = [0.5, 0.0, 0.0, 0.0]
+ keypoint_heatmap_offsets_np[1, 0, 0] = [0.0, 0.0, 0.0, -0.5]
+ keypoint_heatmap_offsets_np[1, 0, 1] = [0.0, 0.0, 0.5, -0.5]
+ keypoint_heatmap_offsets_np[1, 2, 0] = [0.0, 0.0, -1.0, -0.5]
+
+ def graph_fn():
+ keypoint_heatmap = tf.constant(keypoint_heatmap_np, dtype=tf.float32)
+ keypoint_heatmap_offsets = tf.constant(
+ keypoint_heatmap_offsets_np, dtype=tf.float32)
+
+ keypoint_cands, keypoint_scores, num_keypoint_candidates = (
+ cnma.prediction_tensors_to_keypoint_candidates(
+ keypoint_heatmap,
+ keypoint_heatmap_offsets,
+ keypoint_score_threshold=0.5,
+ max_pool_kernel_size=1,
+ max_candidates=2))
+ return keypoint_cands, keypoint_scores, num_keypoint_candidates
+
+ (keypoint_cands, keypoint_scores,
+ num_keypoint_candidates) = self.execute(graph_fn, [])
+
+ expected_keypoint_candidates = [
+ [ # Example 0.
+ [[0.5, 0.25], [1.0, 2.0]], # Candidate 1 of keypoint 1, 2.
+ [[1.75, 1.5], [1.0, 1.0]], # Candidate 2 of keypoint 1, 2.
+ ],
+ [ # Example 1.
+ [[1.25, 0.5], [0.0, -0.5]], # Candidate 1 of keypoint 1, 2.
+ [[2.5, 1.0], [0.5, 0.5]], # Candidate 2 of keypoint 1, 2.
+ ],
+ ]
+ expected_keypoint_scores = [
+ [ # Example 0.
+ [1.0, 0.7], # Candidate 1 scores of keypoint 1, 2.
+ [0.7, 0.3], # Candidate 2 scores of keypoint 1, 2.
+ ],
+ [ # Example 1.
+ [0.6, 1.0], # Candidate 1 scores of keypoint 1, 2.
+ [0.5, 0.9], # Candidate 2 scores of keypoint 1, 2.
+ ],
+ ]
+ expected_num_keypoint_candidates = [
+ [2, 1],
+ [2, 2]
+ ]
+ np.testing.assert_allclose(expected_keypoint_candidates, keypoint_cands)
+ np.testing.assert_allclose(expected_keypoint_scores, keypoint_scores)
+ np.testing.assert_array_equal(expected_num_keypoint_candidates,
+ num_keypoint_candidates)
+
+ def test_regressed_keypoints_at_object_centers(self):
+ batch_size = 2
+ num_keypoints = 5
+ num_instances = 6
+ regressed_keypoint_feature_map_np = np.random.randn(
+ batch_size, 10, 10, 2 * num_keypoints).astype(np.float32)
+ y_indices = np.random.choice(10, (batch_size, num_instances))
+ x_indices = np.random.choice(10, (batch_size, num_instances))
+ offsets = np.stack([y_indices, x_indices], axis=2).astype(np.float32)
+
+ def graph_fn():
+ regressed_keypoint_feature_map = tf.constant(
+ regressed_keypoint_feature_map_np, dtype=tf.float32)
+
+ gathered_regressed_keypoints = (
+ cnma.regressed_keypoints_at_object_centers(
+ regressed_keypoint_feature_map,
+ tf.constant(y_indices, dtype=tf.int32),
+ tf.constant(x_indices, dtype=tf.int32)))
+ return gathered_regressed_keypoints
+
+ gathered_regressed_keypoints = self.execute(graph_fn, [])
+
+ expected_gathered_keypoints_0 = regressed_keypoint_feature_map_np[
+ 0, y_indices[0], x_indices[0], :]
+ expected_gathered_keypoints_1 = regressed_keypoint_feature_map_np[
+ 1, y_indices[1], x_indices[1], :]
+ expected_gathered_keypoints = np.stack([
+ expected_gathered_keypoints_0,
+ expected_gathered_keypoints_1], axis=0)
+ expected_gathered_keypoints = np.reshape(
+ expected_gathered_keypoints,
+ [batch_size, num_instances, num_keypoints, 2])
+ expected_gathered_keypoints += np.expand_dims(offsets, axis=2)
+ expected_gathered_keypoints = np.reshape(
+ expected_gathered_keypoints,
+ [batch_size, num_instances, -1])
+ np.testing.assert_allclose(expected_gathered_keypoints,
+ gathered_regressed_keypoints)
+
+ @parameterized.parameters(
+ {'candidate_ranking_mode': 'min_distance'},
+ {'candidate_ranking_mode': 'score_distance_ratio'},
+ )
+ def test_refine_keypoints(self, candidate_ranking_mode):
+ regressed_keypoints_np = np.array(
+ [
+ # Example 0.
+ [
+ [[2.0, 2.0], [6.0, 10.0], [14.0, 7.0]], # Instance 0.
+ [[0.0, 6.0], [3.0, 3.0], [5.0, 7.0]], # Instance 1.
+ ],
+ # Example 1.
+ [
+ [[6.0, 2.0], [0.0, 0.0], [0.1, 0.1]], # Instance 0.
+ [[6.0, 2.5], [5.0, 5.0], [9.0, 3.0]], # Instance 1.
+ ],
+ ], dtype=np.float32)
+ keypoint_candidates_np = np.array(
+ [
+ # Example 0.
+ [
+ [[2.0, 2.5], [6.0, 10.5], [4.0, 7.0]], # Candidate 0.
+ [[1.0, 8.0], [0.0, 0.0], [2.0, 2.0]], # Candidate 1.
+ [[0.0, 0.0], [0.0, 0.0], [0.0, 0.0]], # Candidate 2.
+ ],
+ # Example 1.
+ [
+ [[6.0, 1.5], [0.1, 0.4], [0.0, 0.0]], # Candidate 0.
+ [[1.0, 4.0], [0.0, 0.3], [0.0, 0.0]], # Candidate 1.
+ [[0.0, 0.0], [0.1, 0.3], [0.0, 0.0]], # Candidate 2.
+ ]
+ ], dtype=np.float32)
+ keypoint_scores_np = np.array(
+ [
+ # Example 0.
+ [
+ [0.8, 0.9, 1.0], # Candidate 0.
+ [0.6, 0.1, 0.9], # Candidate 1.
+                [0.0, 0.0, 0.0],  # Candidate 2.
+ ],
+ # Example 1.
+ [
+ [0.7, 0.3, 0.0], # Candidate 0.
+ [0.6, 0.1, 0.0], # Candidate 1.
+                [0.0, 0.28, 0.0],  # Candidate 2.
+ ]
+ ], dtype=np.float32)
+ num_keypoints_candidates_np = np.array(
+ [
+ # Example 0.
+ [2, 2, 2],
+ # Example 1.
+ [2, 3, 0],
+ ], dtype=np.int32)
+ unmatched_keypoint_score = 0.1
+
+ def graph_fn():
+ regressed_keypoints = tf.constant(
+ regressed_keypoints_np, dtype=tf.float32)
+ keypoint_candidates = tf.constant(
+ keypoint_candidates_np, dtype=tf.float32)
+ keypoint_scores = tf.constant(keypoint_scores_np, dtype=tf.float32)
+ num_keypoint_candidates = tf.constant(num_keypoints_candidates_np,
+ dtype=tf.int32)
+ refined_keypoints, refined_scores = cnma.refine_keypoints(
+ regressed_keypoints, keypoint_candidates, keypoint_scores,
+ num_keypoint_candidates, bboxes=None,
+ unmatched_keypoint_score=unmatched_keypoint_score,
+ box_scale=1.2, candidate_search_scale=0.3,
+ candidate_ranking_mode=candidate_ranking_mode)
+ return refined_keypoints, refined_scores
+
+ refined_keypoints, refined_scores = self.execute(graph_fn, [])
+
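+    # Regressed keypoints snap to the closest (or best score/distance ratio)
+    # candidate within the search window; when no candidate is close enough,
+    # the regressed location is kept and the score falls back to
+    # unmatched_keypoint_score.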
+ if candidate_ranking_mode == 'min_distance':
+ expected_refined_keypoints = np.array(
+ [
+ # Example 0.
+ [
+ [[2.0, 2.5], [6.0, 10.5], [14.0, 7.0]], # Instance 0.
+ [[0.0, 6.0], [3.0, 3.0], [4.0, 7.0]], # Instance 1.
+ ],
+ # Example 1.
+ [
+ [[6.0, 1.5], [0.0, 0.3], [0.1, 0.1]], # Instance 0.
+ [[6.0, 2.5], [5.0, 5.0], [9.0, 3.0]], # Instance 1.
+ ],
+ ], dtype=np.float32)
+ expected_refined_scores = np.array(
+ [
+ # Example 0.
+ [
+ [0.8, 0.9, unmatched_keypoint_score], # Instance 0.
+ [unmatched_keypoint_score, # Instance 1.
+ unmatched_keypoint_score, 1.0],
+ ],
+ # Example 1.
+ [
+ [0.7, 0.1, unmatched_keypoint_score], # Instance 0.
+ [unmatched_keypoint_score, # Instance 1.
+ 0.1, unmatched_keypoint_score],
+ ],
+ ], dtype=np.float32)
+ else:
+ expected_refined_keypoints = np.array(
+ [
+ # Example 0.
+ [
+ [[2.0, 2.5], [6.0, 10.5], [14.0, 7.0]], # Instance 0.
+ [[0.0, 6.0], [3.0, 3.0], [4.0, 7.0]], # Instance 1.
+ ],
+ # Example 1.
+ [
+ [[6.0, 1.5], [0.1, 0.3], [0.1, 0.1]], # Instance 0.
+ [[6.0, 2.5], [5.0, 5.0], [9.0, 3.0]], # Instance 1.
+ ],
+ ], dtype=np.float32)
+ expected_refined_scores = np.array(
+ [
+ # Example 0.
+ [
+ [0.8, 0.9, unmatched_keypoint_score], # Instance 0.
+ [unmatched_keypoint_score, # Instance 1.
+ unmatched_keypoint_score, 1.0],
+ ],
+ # Example 1.
+ [
+ [0.7, 0.28, unmatched_keypoint_score], # Instance 0.
+ [unmatched_keypoint_score, # Instance 1.
+ 0.1, unmatched_keypoint_score],
+ ],
+ ], dtype=np.float32)
+
+ np.testing.assert_allclose(expected_refined_keypoints, refined_keypoints)
+ np.testing.assert_allclose(expected_refined_scores, refined_scores)
+
+ def test_refine_keypoints_with_bboxes(self):
+ regressed_keypoints_np = np.array(
+ [
+ # Example 0.
+ [
+ [[2.0, 2.0], [6.0, 10.0], [14.0, 7.0]], # Instance 0.
+ [[0.0, 6.0], [3.0, 3.0], [5.0, 7.0]], # Instance 1.
+ ],
+ # Example 1.
+ [
+ [[6.0, 2.0], [0.0, 0.0], [0.1, 0.1]], # Instance 0.
+ [[6.0, 2.5], [5.0, 5.0], [9.0, 3.0]], # Instance 1.
+ ],
+ ], dtype=np.float32)
+ keypoint_candidates_np = np.array(
+ [
+ # Example 0.
+ [
+ [[2.0, 2.5], [6.0, 10.5], [4.0, 7.0]], # Candidate 0.
+ [[1.0, 8.0], [0.0, 0.0], [2.0, 2.0]], # Candidate 1.
+ ],
+ # Example 1.
+ [
+ [[6.0, 1.5], [5.0, 5.0], [0.0, 0.0]], # Candidate 0.
+ [[1.0, 4.0], [0.0, 0.3], [0.0, 0.0]], # Candidate 1.
+ ]
+ ], dtype=np.float32)
+ keypoint_scores_np = np.array(
+ [
+ # Example 0.
+ [
+ [0.8, 0.9, 1.0], # Candidate 0.
+ [0.6, 0.1, 0.9], # Candidate 1.
+ ],
+ # Example 1.
+ [
+ [0.7, 0.4, 0.0], # Candidate 0.
+ [0.6, 0.1, 0.0], # Candidate 1.
+ ]
+ ], dtype=np.float32)
+ num_keypoints_candidates_np = np.array(
+ [
+ # Example 0.
+ [2, 2, 2],
+ # Example 1.
+ [2, 2, 0],
+ ], dtype=np.int32)
+ bboxes_np = np.array(
+ [
+ # Example 0.
+ [
+ [2.0, 2.0, 14.0, 10.0], # Instance 0.
+ [0.0, 3.0, 5.0, 7.0], # Instance 1.
+ ],
+ # Example 1.
+ [
+ [0.0, 0.0, 6.0, 2.0], # Instance 0.
+ [5.0, 1.4, 9.0, 5.0], # Instance 1.
+ ],
+ ], dtype=np.float32)
+ unmatched_keypoint_score = 0.1
+
+ def graph_fn():
+ regressed_keypoints = tf.constant(
+ regressed_keypoints_np, dtype=tf.float32)
+ keypoint_candidates = tf.constant(
+ keypoint_candidates_np, dtype=tf.float32)
+ keypoint_scores = tf.constant(keypoint_scores_np, dtype=tf.float32)
+ num_keypoint_candidates = tf.constant(num_keypoints_candidates_np,
+ dtype=tf.int32)
+ bboxes = tf.constant(bboxes_np, dtype=tf.float32)
+ refined_keypoints, refined_scores = cnma.refine_keypoints(
+ regressed_keypoints, keypoint_candidates, keypoint_scores,
+ num_keypoint_candidates, bboxes=bboxes,
+ unmatched_keypoint_score=unmatched_keypoint_score,
+ box_scale=1.0, candidate_search_scale=0.3)
+ return refined_keypoints, refined_scores
+
+ refined_keypoints, refined_scores = self.execute(graph_fn, [])
+
+ expected_refined_keypoints = np.array(
+ [
+ # Example 0.
+ [
+ [[2.0, 2.5], [6.0, 10.0], [14.0, 7.0]], # Instance 0.
+ [[0.0, 6.0], [3.0, 3.0], [4.0, 7.0]], # Instance 1.
+ ],
+ # Example 1.
+ [
+ [[6.0, 1.5], [0.0, 0.3], [0.1, 0.1]], # Instance 0.
+ [[6.0, 1.5], [5.0, 5.0], [9.0, 3.0]], # Instance 1.
+ ],
+ ], dtype=np.float32)
+ expected_refined_scores = np.array(
+ [
+ # Example 0.
+ [
+ [0.8, unmatched_keypoint_score, # Instance 0.
+ unmatched_keypoint_score],
+ [unmatched_keypoint_score, # Instance 1.
+ unmatched_keypoint_score, 1.0],
+ ],
+ # Example 1.
+ [
+ [0.7, 0.1, unmatched_keypoint_score], # Instance 0.
+ [0.7, 0.4, unmatched_keypoint_score], # Instance 1.
+ ],
+ ], dtype=np.float32)
+
+ np.testing.assert_allclose(expected_refined_keypoints, refined_keypoints)
+ np.testing.assert_allclose(expected_refined_scores, refined_scores)
+
+ def test_pad_to_full_keypoint_dim(self):
+ batch_size = 4
+ num_instances = 8
+ num_keypoints = 2
+ keypoint_inds = [1, 3]
+ num_total_keypoints = 5
+
+ kpt_coords_np = np.random.randn(batch_size, num_instances, num_keypoints, 2)
+ kpt_scores_np = np.random.randn(batch_size, num_instances, num_keypoints)
+
+ def graph_fn():
+ kpt_coords = tf.constant(kpt_coords_np)
+ kpt_scores = tf.constant(kpt_scores_np)
+ kpt_coords_padded, kpt_scores_padded = (
+ cnma._pad_to_full_keypoint_dim(
+ kpt_coords, kpt_scores, keypoint_inds, num_total_keypoints))
+ return kpt_coords_padded, kpt_scores_padded
+
+ kpt_coords_padded, kpt_scores_padded = self.execute(graph_fn, [])
+
+ self.assertAllEqual([batch_size, num_instances, num_total_keypoints, 2],
+ kpt_coords_padded.shape)
+ self.assertAllEqual([batch_size, num_instances, num_total_keypoints],
+ kpt_scores_padded.shape)
+
+ for i, kpt_ind in enumerate(keypoint_inds):
+ np.testing.assert_allclose(kpt_coords_np[:, :, i, :],
+ kpt_coords_padded[:, :, kpt_ind, :])
+ np.testing.assert_allclose(kpt_scores_np[:, :, i],
+ kpt_scores_padded[:, :, kpt_ind])
+
+ def test_pad_to_full_instance_dim(self):
+ batch_size = 4
+ max_instances = 8
+ num_keypoints = 6
+ num_instances = 2
+ instance_inds = [1, 3]
+
+ kpt_coords_np = np.random.randn(batch_size, num_instances, num_keypoints, 2)
+ kpt_scores_np = np.random.randn(batch_size, num_instances, num_keypoints)
+
+ def graph_fn():
+ kpt_coords = tf.constant(kpt_coords_np)
+ kpt_scores = tf.constant(kpt_scores_np)
+ kpt_coords_padded, kpt_scores_padded = (
+ cnma._pad_to_full_instance_dim(
+ kpt_coords, kpt_scores, instance_inds, max_instances))
+ return kpt_coords_padded, kpt_scores_padded
+
+ kpt_coords_padded, kpt_scores_padded = self.execute(graph_fn, [])
+
+ self.assertAllEqual([batch_size, max_instances, num_keypoints, 2],
+ kpt_coords_padded.shape)
+ self.assertAllEqual([batch_size, max_instances, num_keypoints],
+ kpt_scores_padded.shape)
+
+ for i, inst_ind in enumerate(instance_inds):
+ np.testing.assert_allclose(kpt_coords_np[:, i, :, :],
+ kpt_coords_padded[:, inst_ind, :, :])
+ np.testing.assert_allclose(kpt_scores_np[:, i, :],
+ kpt_scores_padded[:, inst_ind, :])
+
+
+# Common parameters for setting up testing examples across tests.
+_NUM_CLASSES = 10
+_KEYPOINT_INDICES = [0, 1, 2, 3]
+_NUM_KEYPOINTS = len(_KEYPOINT_INDICES)
+_TASK_NAME = 'human_pose'
+
+
+def get_fake_center_params():
+ """Returns the fake object center parameter namedtuple."""
+ return cnma.ObjectCenterParams(
+ classification_loss=losses.WeightedSigmoidClassificationLoss(),
+ object_center_loss_weight=1.0,
+ min_box_overlap_iou=1.0,
+ max_box_predictions=5,
+ use_labeled_classes=False)
+
+
+def get_fake_od_params():
+ """Returns the fake object detection parameter namedtuple."""
+ return cnma.ObjectDetectionParams(
+ localization_loss=losses.L1LocalizationLoss(),
+ offset_loss_weight=1.0,
+ scale_loss_weight=0.1)
+
+
+def get_fake_kp_params():
+ """Returns the fake keypoint estimation parameter namedtuple."""
+ return cnma.KeypointEstimationParams(
+ task_name=_TASK_NAME,
+ class_id=1,
+ keypoint_indices=_KEYPOINT_INDICES,
+ keypoint_std_dev=[0.00001] * len(_KEYPOINT_INDICES),
+ classification_loss=losses.WeightedSigmoidClassificationLoss(),
+ localization_loss=losses.L1LocalizationLoss(),
+ keypoint_candidate_score_threshold=0.1)
+
+
+def get_fake_mask_params():
+ """Returns the fake mask estimation parameter namedtuple."""
+ return cnma.MaskParams(
+ classification_loss=losses.WeightedSoftmaxClassificationLoss(),
+ task_loss_weight=1.0,
+ mask_height=4,
+ mask_width=4)
+
+
+def build_center_net_meta_arch(build_resnet=False):
+ """Builds the CenterNet meta architecture."""
+ if build_resnet:
+ feature_extractor = (
+ center_net_resnet_feature_extractor.CenterNetResnetFeatureExtractor(
+ 'resnet_v2_101'))
+ else:
+ feature_extractor = DummyFeatureExtractor(
+ channel_means=(1.0, 2.0, 3.0),
+ channel_stds=(10., 20., 30.),
+ bgr_ordering=False,
+ num_feature_outputs=2,
+ stride=4)
+ image_resizer_fn = functools.partial(
+ preprocessor.resize_to_range,
+ min_dimension=128,
+ max_dimension=128,
+      pad_to_max_dimension=True)
+ return cnma.CenterNetMetaArch(
+ is_training=True,
+ add_summaries=False,
+ num_classes=_NUM_CLASSES,
+ feature_extractor=feature_extractor,
+ image_resizer_fn=image_resizer_fn,
+ object_center_params=get_fake_center_params(),
+ object_detection_params=get_fake_od_params(),
+ keypoint_params_dict={_TASK_NAME: get_fake_kp_params()},
+ mask_params=get_fake_mask_params())
+
+
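+# Helper that inverts tf.nn.sigmoid (with a small eps for numerical
+# stability), so tests can encode the post-sigmoid probability they want;
+# e.g. _logit(0.75) ~= log(3) ~= 1.0986.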
+def _logit(p):
+ return np.log(
+ (p + np.finfo(np.float32).eps) / (1 - p + np.finfo(np.float32).eps))
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class CenterNetMetaArchLibTest(test_case.TestCase):
+ """Test for CenterNet meta architecture related functions."""
+
+ def test_get_keypoint_name(self):
+ self.assertEqual('human_pose/keypoint_offset',
+ cnma.get_keypoint_name('human_pose', 'keypoint_offset'))
+
+ def test_get_num_instances_from_weights(self):
+ weight1 = tf.constant([0.0, 0.0, 0.0], dtype=tf.float32)
+ weight2 = tf.constant([0.5, 0.9, 0.0], dtype=tf.float32)
+ weight3 = tf.constant([0.0, 0.0, 1.0], dtype=tf.float32)
+
+ def graph_fn_1():
+ # Total of three elements with non-zero values.
+ num_instances = cnma.get_num_instances_from_weights(
+ [weight1, weight2, weight3])
+ return num_instances
+ num_instances = self.execute(graph_fn_1, [])
+ self.assertAlmostEqual(3, num_instances)
+
+ # No non-zero value in the weights. Return minimum value: 1.
+ def graph_fn_2():
+ # Total of three elements with non-zero values.
+ num_instances = cnma.get_num_instances_from_weights([weight1, weight1])
+ return num_instances
+ num_instances = self.execute(graph_fn_2, [])
+ self.assertAlmostEqual(1, num_instances)
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class CenterNetMetaArchTest(test_case.TestCase, parameterized.TestCase):
+ """Tests for the CenterNet meta architecture."""
+
+ def test_construct_prediction_heads(self):
+ model = build_center_net_meta_arch()
+ fake_feature_map = np.zeros((4, 128, 128, 8))
+
+ # Check the dictionary contains expected keys and corresponding heads with
+ # correct dimensions.
+ # "object center" head:
+ output = model._prediction_head_dict[cnma.OBJECT_CENTER][-1](
+ fake_feature_map)
+ self.assertEqual((4, 128, 128, _NUM_CLASSES), output.shape)
+
+ # "object scale" (height/width) head:
+ output = model._prediction_head_dict[cnma.BOX_SCALE][-1](fake_feature_map)
+ self.assertEqual((4, 128, 128, 2), output.shape)
+
+ # "object offset" head:
+ output = model._prediction_head_dict[cnma.BOX_OFFSET][-1](fake_feature_map)
+ self.assertEqual((4, 128, 128, 2), output.shape)
+
+ # "keypoint offset" head:
+ output = model._prediction_head_dict[
+ cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_OFFSET)][-1](
+ fake_feature_map)
+ self.assertEqual((4, 128, 128, 2), output.shape)
+
+ # "keypoint heatmap" head:
+ output = model._prediction_head_dict[cnma.get_keypoint_name(
+ _TASK_NAME, cnma.KEYPOINT_HEATMAP)][-1](
+ fake_feature_map)
+ self.assertEqual((4, 128, 128, _NUM_KEYPOINTS), output.shape)
+
+ # "keypoint regression" head:
+ output = model._prediction_head_dict[cnma.get_keypoint_name(
+ _TASK_NAME, cnma.KEYPOINT_REGRESSION)][-1](
+ fake_feature_map)
+ self.assertEqual((4, 128, 128, 2 * _NUM_KEYPOINTS), output.shape)
+
+ # "mask" head:
+ output = model._prediction_head_dict[cnma.SEGMENTATION_HEATMAP][-1](
+ fake_feature_map)
+ self.assertEqual((4, 128, 128, _NUM_CLASSES), output.shape)
+
+ def test_initialize_target_assigners(self):
+ model = build_center_net_meta_arch()
+ assigner_dict = model._initialize_target_assigners(
+ stride=2,
+ min_box_overlap_iou=0.7)
+
+    # Check whether the corresponding target assigner class is initialized.
+ # object center target assigner:
+ self.assertIsInstance(assigner_dict[cnma.OBJECT_CENTER],
+ cn_assigner.CenterNetCenterHeatmapTargetAssigner)
+
+ # object detection target assigner:
+ self.assertIsInstance(assigner_dict[cnma.DETECTION_TASK],
+ cn_assigner.CenterNetBoxTargetAssigner)
+
+ # keypoint estimation target assigner:
+ self.assertIsInstance(assigner_dict[_TASK_NAME],
+ cn_assigner.CenterNetKeypointTargetAssigner)
+
+ # mask estimation target assigner:
+ self.assertIsInstance(assigner_dict[cnma.SEGMENTATION_TASK],
+ cn_assigner.CenterNetMaskTargetAssigner)
+
+ def test_predict(self):
+ """Test the predict function."""
+
+ model = build_center_net_meta_arch()
+ def graph_fn():
+ prediction_dict = model.predict(tf.zeros([2, 128, 128, 3]), None)
+ return prediction_dict
+
+ prediction_dict = self.execute(graph_fn, [])
+
+ self.assertEqual(prediction_dict['preprocessed_inputs'].shape,
+ (2, 128, 128, 3))
+ self.assertEqual(prediction_dict[cnma.OBJECT_CENTER][0].shape,
+ (2, 32, 32, _NUM_CLASSES))
+ self.assertEqual(prediction_dict[cnma.BOX_SCALE][0].shape,
+ (2, 32, 32, 2))
+ self.assertEqual(prediction_dict[cnma.BOX_OFFSET][0].shape,
+ (2, 32, 32, 2))
+ self.assertEqual(prediction_dict[cnma.SEGMENTATION_HEATMAP][0].shape,
+ (2, 32, 32, _NUM_CLASSES))
+
+ def test_loss(self):
+ """Test the loss function."""
+ groundtruth_dict = get_fake_groundtruth_dict(16, 32, 4)
+ model = build_center_net_meta_arch()
+ model.provide_groundtruth(
+ groundtruth_boxes_list=groundtruth_dict[fields.BoxListFields.boxes],
+ groundtruth_weights_list=groundtruth_dict[fields.BoxListFields.weights],
+ groundtruth_classes_list=groundtruth_dict[fields.BoxListFields.classes],
+ groundtruth_keypoints_list=groundtruth_dict[
+ fields.BoxListFields.keypoints],
+ groundtruth_masks_list=groundtruth_dict[
+ fields.BoxListFields.masks])
+
+ prediction_dict = get_fake_prediction_dict(
+ input_height=16, input_width=32, stride=4)
+
+ def graph_fn():
+ loss_dict = model.loss(prediction_dict,
+ tf.constant([[16, 24, 3], [16, 24, 3]]))
+ return loss_dict
+
+ loss_dict = self.execute(graph_fn, [])
+
+ # The prediction and groundtruth are curated to produce very low loss.
+ self.assertGreater(
+ 0.01, loss_dict['%s/%s' % (cnma.LOSS_KEY_PREFIX, cnma.OBJECT_CENTER)])
+ self.assertGreater(
+ 0.01, loss_dict['%s/%s' % (cnma.LOSS_KEY_PREFIX, cnma.BOX_SCALE)])
+ self.assertGreater(
+ 0.01, loss_dict['%s/%s' % (cnma.LOSS_KEY_PREFIX, cnma.BOX_OFFSET)])
+ self.assertGreater(
+ 0.01,
+ loss_dict['%s/%s' %
+ (cnma.LOSS_KEY_PREFIX,
+ cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_HEATMAP))])
+ self.assertGreater(
+ 0.01,
+ loss_dict['%s/%s' %
+ (cnma.LOSS_KEY_PREFIX,
+ cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_OFFSET))])
+ self.assertGreater(
+ 0.01, loss_dict['%s/%s' % (cnma.LOSS_KEY_PREFIX,
+ cnma.get_keypoint_name(
+ _TASK_NAME, cnma.KEYPOINT_REGRESSION))])
+ self.assertGreater(
+ 0.01, loss_dict['%s/%s' % (cnma.LOSS_KEY_PREFIX,
+ cnma.SEGMENTATION_HEATMAP)])
+
+ @parameterized.parameters(
+ {'target_class_id': 1},
+ {'target_class_id': 2},
+ )
+ def test_postprocess(self, target_class_id):
+ """Test the postprocess function."""
+ model = build_center_net_meta_arch()
+ max_detection = model._center_params.max_box_predictions
+ num_keypoints = len(model._kp_params_dict[_TASK_NAME].keypoint_indices)
+
+ class_center = np.zeros((1, 32, 32, 10), dtype=np.float32)
+ height_width = np.zeros((1, 32, 32, 2), dtype=np.float32)
+ offset = np.zeros((1, 32, 32, 2), dtype=np.float32)
+ keypoint_heatmaps = np.zeros((1, 32, 32, num_keypoints), dtype=np.float32)
+ keypoint_offsets = np.zeros((1, 32, 32, 2), dtype=np.float32)
+ keypoint_regression = np.random.randn(1, 32, 32, num_keypoints * 2)
+
+ class_probs = np.zeros(10)
+ class_probs[target_class_id] = _logit(0.75)
+ class_center[0, 16, 16] = class_probs
+ height_width[0, 16, 16] = [5, 10]
+ offset[0, 16, 16] = [.25, .5]
+ keypoint_regression[0, 16, 16] = [
+ -1., -1.,
+ -1., 1.,
+ 1., -1.,
+ 1., 1.]
+ keypoint_heatmaps[0, 14, 14, 0] = _logit(0.9)
+ keypoint_heatmaps[0, 14, 18, 1] = _logit(0.9)
+ keypoint_heatmaps[0, 18, 14, 2] = _logit(0.9)
+ keypoint_heatmaps[0, 18, 18, 3] = _logit(0.05) # Note the low score.
+
+ segmentation_heatmap = np.zeros((1, 32, 32, 10), dtype=np.float32)
+ segmentation_heatmap[:, 14:18, 14:18, target_class_id] = 1.0
+ segmentation_heatmap = _logit(segmentation_heatmap)
+
+ class_center = tf.constant(class_center)
+ height_width = tf.constant(height_width)
+ offset = tf.constant(offset)
+ keypoint_heatmaps = tf.constant(keypoint_heatmaps, dtype=tf.float32)
+ keypoint_offsets = tf.constant(keypoint_offsets, dtype=tf.float32)
+ keypoint_regression = tf.constant(keypoint_regression, dtype=tf.float32)
+ segmentation_heatmap = tf.constant(segmentation_heatmap, dtype=tf.float32)
+
+ prediction_dict = {
+ cnma.OBJECT_CENTER: [class_center],
+ cnma.BOX_SCALE: [height_width],
+ cnma.BOX_OFFSET: [offset],
+ cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_HEATMAP):
+ [keypoint_heatmaps],
+ cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_OFFSET):
+ [keypoint_offsets],
+ cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_REGRESSION):
+ [keypoint_regression],
+ cnma.SEGMENTATION_HEATMAP: [segmentation_heatmap],
+ }
+
+ def graph_fn():
+ detections = model.postprocess(prediction_dict,
+ tf.constant([[128, 128, 3]]))
+ return detections
+
+ detections = self.execute_cpu(graph_fn, [])
+
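+    # The expected box follows from the peak at (16, 16) with stride 4:
+    # center = (16.25, 16.5) * 4 = (65, 66), size = (5, 10) * 4 = (20, 40),
+    # i.e. [55, 46, 75, 86] before normalizing by the image size (128).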
+ self.assertAllClose(detections['detection_boxes'][0, 0],
+ np.array([55, 46, 75, 86]) / 128.0)
+ self.assertAllClose(detections['detection_scores'][0],
+ [.75, .5, .5, .5, .5])
+ self.assertEqual(detections['detection_classes'][0, 0], target_class_id)
+ self.assertEqual(detections['num_detections'], [5])
+ self.assertAllEqual([1, max_detection, num_keypoints, 2],
+ detections['detection_keypoints'].shape)
+ self.assertAllEqual([1, max_detection, num_keypoints],
+ detections['detection_keypoint_scores'].shape)
+ self.assertAllEqual([1, max_detection, 4, 4],
+ detections['detection_masks'].shape)
+
+    # There should be some section of the first mask (corresponding to the
+    # only detection) with non-zero mask values.
+ self.assertGreater(np.sum(detections['detection_masks'][0, 0, :, :] > 0), 0)
+ self.assertAllEqual(
+ detections['detection_masks'][0, 1:, :, :],
+ np.zeros_like(detections['detection_masks'][0, 1:, :, :]))
+
+ if target_class_id == 1:
+ expected_kpts_for_obj_0 = np.array(
+ [[14., 14.], [14., 18.], [18., 14.], [17., 17.]]) / 32.
+ expected_kpt_scores_for_obj_0 = np.array(
+ [0.9, 0.9, 0.9, cnma.UNMATCHED_KEYPOINT_SCORE])
+ np.testing.assert_allclose(detections['detection_keypoints'][0][0],
+ expected_kpts_for_obj_0, rtol=1e-6)
+ np.testing.assert_allclose(detections['detection_keypoint_scores'][0][0],
+ expected_kpt_scores_for_obj_0, rtol=1e-6)
+ else:
+ # All keypoint outputs should be zeros.
+      np.testing.assert_allclose(
+          detections['detection_keypoints'][0][0],
+          np.zeros([num_keypoints, 2], np.float32),
+          rtol=1e-6)
+      np.testing.assert_allclose(
+          detections['detection_keypoint_scores'][0][0],
+          np.zeros([num_keypoints], np.float32),
+          rtol=1e-6)
+
+ def test_get_instance_indices(self):
+ classes = tf.constant([[0, 1, 2, 0], [2, 1, 2, 2]], dtype=tf.int32)
+ num_detections = tf.constant([1, 3], dtype=tf.int32)
+ batch_index = 1
+ class_id = 2
+ model = build_center_net_meta_arch()
+ valid_indices = model._get_instance_indices(
+ classes, num_detections, batch_index, class_id)
+ self.assertAllEqual(valid_indices.numpy(), [0, 2])
+
+
+def get_fake_prediction_dict(input_height, input_width, stride):
+ """Prepares the fake prediction dictionary."""
+ output_height = input_height // stride
+ output_width = input_width // stride
+ object_center = np.zeros((2, output_height, output_width, _NUM_CLASSES),
+ dtype=np.float32)
+ # Box center:
+ # y: floor((0.54 + 0.56) / 2 * 4) = 2,
+ # x: floor((0.54 + 0.56) / 2 * 8) = 4
+ object_center[0, 2, 4, 1] = 1.0
+ object_center = _logit(object_center)
+
+ # Box size:
+ # height: (0.56 - 0.54) * 4 = 0.08
+ # width: (0.56 - 0.54) * 8 = 0.16
+ object_scale = np.zeros((2, output_height, output_width, 2), dtype=np.float32)
+ object_scale[0, 2, 4] = 0.08, 0.16
+
+ # Box center offset coordinate (0.55, 0.55):
+ # y-offset: 0.55 * 4 - 2 = 0.2
+ # x-offset: 0.55 * 8 - 4 = 0.4
+ object_offset = np.zeros((2, output_height, output_width, 2),
+ dtype=np.float32)
+ object_offset[0, 2, 4] = 0.2, 0.4
+
+ keypoint_heatmap = np.zeros((2, output_height, output_width, _NUM_KEYPOINTS),
+ dtype=np.float32)
+ keypoint_heatmap[0, 2, 4, 1] = 1.0
+ keypoint_heatmap[0, 2, 4, 3] = 1.0
+ keypoint_heatmap = _logit(keypoint_heatmap)
+
+ keypoint_offset = np.zeros((2, output_height, output_width, 2),
+ dtype=np.float32)
+ keypoint_offset[0, 2, 4] = 0.2, 0.4
+
+ keypoint_regression = np.zeros(
+ (2, output_height, output_width, 2 * _NUM_KEYPOINTS), dtype=np.float32)
+ keypoint_regression[0, 2, 4] = 0.0, 0.0, 0.2, 0.4, 0.0, 0.0, 0.2, 0.4
+
+ mask_heatmap = np.zeros((2, output_height, output_width, _NUM_CLASSES),
+ dtype=np.float32)
+ mask_heatmap[0, 2, 4, 1] = 1.0
+ mask_heatmap = _logit(mask_heatmap)
+
+ prediction_dict = {
+ 'preprocessed_inputs':
+ tf.zeros((2, input_height, input_width, 3)),
+ cnma.OBJECT_CENTER: [
+ tf.constant(object_center),
+ tf.constant(object_center)
+ ],
+ cnma.BOX_SCALE: [
+ tf.constant(object_scale),
+ tf.constant(object_scale)
+ ],
+ cnma.BOX_OFFSET: [
+ tf.constant(object_offset),
+ tf.constant(object_offset)
+ ],
+ cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_HEATMAP): [
+ tf.constant(keypoint_heatmap),
+ tf.constant(keypoint_heatmap)
+ ],
+ cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_OFFSET): [
+ tf.constant(keypoint_offset),
+ tf.constant(keypoint_offset)
+ ],
+ cnma.get_keypoint_name(_TASK_NAME, cnma.KEYPOINT_REGRESSION): [
+ tf.constant(keypoint_regression),
+ tf.constant(keypoint_regression)
+ ],
+ cnma.SEGMENTATION_HEATMAP: [
+ tf.constant(mask_heatmap),
+ tf.constant(mask_heatmap)
+ ]
+ }
+ return prediction_dict
+
+
+def get_fake_groundtruth_dict(input_height, input_width, stride):
+ """Prepares the fake groundtruth dictionary."""
+ # A small box with center at (0.55, 0.55).
+ boxes = [
+ tf.constant([[0.54, 0.54, 0.56, 0.56]]),
+ tf.constant([[0.0, 0.0, 0.5, 0.5]]),
+ ]
+ classes = [
+ tf.one_hot([1], depth=_NUM_CLASSES),
+ tf.one_hot([0], depth=_NUM_CLASSES),
+ ]
+ weights = [
+ tf.constant([1.]),
+ tf.constant([0.]),
+ ]
+ keypoints = [
+ tf.tile(
+ tf.expand_dims(
+ tf.constant([[float('nan'), 0.55,
+ float('nan'), 0.55, 0.55, 0.0]]),
+ axis=2),
+ multiples=[1, 1, 2]),
+ tf.tile(
+ tf.expand_dims(
+ tf.constant([[float('nan'), 0.55,
+ float('nan'), 0.55, 0.55, 0.0]]),
+ axis=2),
+ multiples=[1, 1, 2]),
+ ]
+ labeled_classes = [
+ tf.one_hot([1], depth=_NUM_CLASSES) + tf.one_hot([2], depth=_NUM_CLASSES),
+ tf.one_hot([0], depth=_NUM_CLASSES) + tf.one_hot([1], depth=_NUM_CLASSES),
+ ]
+ mask = np.zeros((1, input_height, input_width), dtype=np.float32)
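+  # The mask covers the single output-resolution pixel (y=2, x=4) mapped back
+  # to the input resolution (rows 8:8+stride, cols 16:16+stride).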
+ mask[0, 8:8+stride, 16:16+stride] = 1
+ masks = [
+ tf.constant(mask),
+ tf.zeros_like(mask),
+ ]
+ groundtruth_dict = {
+ fields.BoxListFields.boxes: boxes,
+ fields.BoxListFields.weights: weights,
+ fields.BoxListFields.classes: classes,
+ fields.BoxListFields.keypoints: keypoints,
+ fields.BoxListFields.masks: masks,
+ fields.InputDataFields.groundtruth_labeled_classes: labeled_classes,
+ }
+ return groundtruth_dict
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class CenterNetMetaComputeLossTest(test_case.TestCase):
+ """Test for CenterNet loss compuation related functions."""
+
+ def setUp(self):
+ self.model = build_center_net_meta_arch()
+ self.classification_loss_fn = self.model._center_params.classification_loss
+ self.localization_loss_fn = self.model._od_params.localization_loss
+ self.true_image_shapes = tf.constant([[16, 24, 3], [16, 24, 3]])
+ self.input_height = 16
+ self.input_width = 32
+ self.stride = 4
+ self.per_pixel_weights = self.get_per_pixel_weights(self.true_image_shapes,
+ self.input_height,
+ self.input_width,
+ self.stride)
+ self.prediction_dict = get_fake_prediction_dict(self.input_height,
+ self.input_width,
+ self.stride)
+ self.model._groundtruth_lists = get_fake_groundtruth_dict(
+ self.input_height, self.input_width, self.stride)
+ super(CenterNetMetaComputeLossTest, self).setUp()
+
+ def get_per_pixel_weights(self, true_image_shapes, input_height, input_width,
+ stride):
+ output_height, output_width = (input_height // stride,
+ input_width // stride)
+
+ # TODO(vighneshb) Explore whether using floor here is safe.
+ output_true_image_shapes = tf.ceil(tf.to_float(true_image_shapes) / stride)
+ per_pixel_weights = cnma.get_valid_anchor_weights_in_flattened_image(
+ output_true_image_shapes, output_height, output_width)
+ per_pixel_weights = tf.expand_dims(per_pixel_weights, 2)
+ return per_pixel_weights
+
+ def test_compute_object_center_loss(self):
+ def graph_fn():
+ loss = self.model._compute_object_center_loss(
+ object_center_predictions=self.prediction_dict[cnma.OBJECT_CENTER],
+ input_height=self.input_height,
+ input_width=self.input_width,
+ per_pixel_weights=self.per_pixel_weights)
+ return loss
+
+ loss = self.execute(graph_fn, [])
+
+ # The prediction and groundtruth are curated to produce very low loss.
+ self.assertGreater(0.01, loss)
+
+ default_value = self.model._center_params.use_only_known_classes
+ self.model._center_params = (
+ self.model._center_params._replace(use_only_known_classes=True))
+ loss = self.model._compute_object_center_loss(
+ object_center_predictions=self.prediction_dict[cnma.OBJECT_CENTER],
+ input_height=self.input_height,
+ input_width=self.input_width,
+ per_pixel_weights=self.per_pixel_weights)
+ self.model._center_params = (
+ self.model._center_params._replace(
+ use_only_known_classes=default_value))
+
+ # The prediction and groundtruth are curated to produce very low loss.
+ self.assertGreater(0.01, loss)
+
+ def test_compute_box_scale_and_offset_loss(self):
+ def graph_fn():
+ scale_loss, offset_loss = self.model._compute_box_scale_and_offset_loss(
+ scale_predictions=self.prediction_dict[cnma.BOX_SCALE],
+ offset_predictions=self.prediction_dict[cnma.BOX_OFFSET],
+ input_height=self.input_height,
+ input_width=self.input_width)
+ return scale_loss, offset_loss
+
+ scale_loss, offset_loss = self.execute(graph_fn, [])
+
+ # The prediction and groundtruth are curated to produce very low loss.
+ self.assertGreater(0.01, scale_loss)
+ self.assertGreater(0.01, offset_loss)
+
+ def test_compute_kp_heatmap_loss(self):
+ def graph_fn():
+ loss = self.model._compute_kp_heatmap_loss(
+ input_height=self.input_height,
+ input_width=self.input_width,
+ task_name=_TASK_NAME,
+ heatmap_predictions=self.prediction_dict[cnma.get_keypoint_name(
+ _TASK_NAME, cnma.KEYPOINT_HEATMAP)],
+ classification_loss_fn=self.classification_loss_fn,
+ per_pixel_weights=self.per_pixel_weights)
+ return loss
+
+ loss = self.execute(graph_fn, [])
+
+ # The prediction and groundtruth are curated to produce very low loss.
+ self.assertGreater(0.01, loss)
+
+ def test_compute_kp_offset_loss(self):
+ def graph_fn():
+ loss = self.model._compute_kp_offset_loss(
+ input_height=self.input_height,
+ input_width=self.input_width,
+ task_name=_TASK_NAME,
+ offset_predictions=self.prediction_dict[cnma.get_keypoint_name(
+ _TASK_NAME, cnma.KEYPOINT_OFFSET)],
+ localization_loss_fn=self.localization_loss_fn)
+ return loss
+
+ loss = self.execute(graph_fn, [])
+
+ # The prediction and groundtruth are curated to produce very low loss.
+ self.assertGreater(0.01, loss)
+
+ def test_compute_kp_regression_loss(self):
+ def graph_fn():
+ loss = self.model._compute_kp_regression_loss(
+ input_height=self.input_height,
+ input_width=self.input_width,
+ task_name=_TASK_NAME,
+ regression_predictions=self.prediction_dict[cnma.get_keypoint_name(
+ _TASK_NAME, cnma.KEYPOINT_REGRESSION,)],
+ localization_loss_fn=self.localization_loss_fn)
+ return loss
+
+ loss = self.execute(graph_fn, [])
+
+ # The prediction and groundtruth are curated to produce very low loss.
+ self.assertGreater(0.01, loss)
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class CenterNetMetaArchRestoreTest(test_case.TestCase):
+
+ def test_restore_map_resnet(self):
+ """Test restore map for a resnet backbone."""
+
+ model = build_center_net_meta_arch(build_resnet=True)
+ restore_from_objects_map = model.restore_from_objects('classification')
+ self.assertIsInstance(restore_from_objects_map['feature_extractor'],
+ tf.keras.Model)
+
+
+class DummyFeatureExtractor(cnma.CenterNetFeatureExtractor):
+
+ def __init__(self,
+ channel_means,
+ channel_stds,
+ bgr_ordering,
+ num_feature_outputs,
+ stride):
+ self._num_feature_outputs = num_feature_outputs
+ self._stride = stride
+ super(DummyFeatureExtractor, self).__init__(
+ channel_means=channel_means, channel_stds=channel_stds,
+ bgr_ordering=bgr_ordering)
+
+ def predict(self):
+ pass
+
+ def loss(self):
+ pass
+
+ def postprocess(self):
+ pass
+
+ def call(self, inputs):
+ batch_size, input_height, input_width, _ = inputs.shape
+ fake_output = tf.ones([
+ batch_size, input_height // self._stride, input_width // self._stride,
+ 64
+ ], dtype=tf.float32)
+ return [fake_output] * self._num_feature_outputs
+
+ @property
+ def out_stride(self):
+ return self._stride
+
+ @property
+ def num_feature_outputs(self):
+ return self._num_feature_outputs
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class CenterNetFeatureExtractorTest(test_case.TestCase):
+ """Test the base feature extractor class."""
+
+ def test_preprocess(self):
+ feature_extractor = DummyFeatureExtractor(
+ channel_means=(1.0, 2.0, 3.0),
+ channel_stds=(10., 20., 30.), bgr_ordering=False,
+ num_feature_outputs=2, stride=4)
+
+ img = np.zeros((2, 32, 32, 3))
+ img[:, :, :] = 11, 22, 33
+
+ def graph_fn():
+ output = feature_extractor.preprocess(img)
+ return output
+
+ output = self.execute(graph_fn, [])
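+    # Each channel normalizes to (11 - 1) / 10 = (22 - 2) / 20 =
+    # (33 - 3) / 30 = 1, so the sum equals the total number of elements.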
+ self.assertAlmostEqual(output.sum(), 2 * 32 * 32 * 3)
+
+ def test_bgr_ordering(self):
+ feature_extractor = DummyFeatureExtractor(
+ channel_means=(0.0, 0.0, 0.0),
+ channel_stds=(1., 1., 1.), bgr_ordering=True,
+ num_feature_outputs=2, stride=4)
+
+ img = np.zeros((2, 32, 32, 3), dtype=np.float32)
+ img[:, :, :] = 1, 2, 3
+
+ def graph_fn():
+ output = feature_extractor.preprocess(img)
+ return output
+
+ output = self.execute(graph_fn, [])
+ self.assertAllClose(output[..., 2], 1 * np.ones((2, 32, 32)))
+ self.assertAllClose(output[..., 1], 2 * np.ones((2, 32, 32)))
+ self.assertAllClose(output[..., 0], 3 * np.ones((2, 32, 32)))
+
+ def test_default_ordering(self):
+ feature_extractor = DummyFeatureExtractor(
+ channel_means=(0.0, 0.0, 0.0),
+ channel_stds=(1., 1., 1.), bgr_ordering=False,
+ num_feature_outputs=2, stride=4)
+
+ img = np.zeros((2, 32, 32, 3), dtype=np.float32)
+ img[:, :, :] = 1, 2, 3
+
+ def graph_fn():
+ output = feature_extractor.preprocess(img)
+ return output
+
+ output = self.execute(graph_fn, [])
+ self.assertAllClose(output[..., 0], 1 * np.ones((2, 32, 32)))
+ self.assertAllClose(output[..., 1], 2 * np.ones((2, 32, 32)))
+ self.assertAllClose(output[..., 2], 3 * np.ones((2, 32, 32)))
+
+
+if __name__ == '__main__':
+ tf.enable_v2_behavior()
+ tf.test.main()
diff --git a/research/object_detection/meta_architectures/context_rcnn_lib.py b/research/object_detection/meta_architectures/context_rcnn_lib.py
new file mode 100644
index 0000000000000000000000000000000000000000..902a88c77669cd27eb36490d645740041600fcac
--- /dev/null
+++ b/research/object_detection/meta_architectures/context_rcnn_lib.py
@@ -0,0 +1,224 @@
+# Lint as: python3
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Library functions for ContextRCNN."""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import tensorflow.compat.v1 as tf
+import tf_slim as slim
+
+
+# The negative value used in padding the invalid weights.
+_NEGATIVE_PADDING_VALUE = -100000
+
+
+def filter_weight_value(weights, values, valid_mask):
+ """Filters weights and values based on valid_mask.
+
+  _NEGATIVE_PADDING_VALUE is added to the invalid elements in the weights so
+  that they do not contribute to the softmax. Invalid elements in the values
+  are set to 0.
+
+ Args:
+ weights: A float Tensor of shape [batch_size, input_size, context_size].
+ values: A float Tensor of shape [batch_size, context_size,
+ projected_dimension].
+ valid_mask: A boolean Tensor of shape [batch_size, context_size]. True means
+ valid and False means invalid.
+
+ Returns:
+ weights: A float Tensor of shape [batch_size, input_size, context_size].
+ values: A float Tensor of shape [batch_size, context_size,
+ projected_dimension].
+
+ Raises:
+    ValueError: If the shapes of weights, values and valid_mask do not match.
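+
+  Illustrative example (mirrors the accompanying unit test):
+    weights = tf.ones((2, 3, 2), tf.float32) * 4
+    values = tf.ones((2, 2, 4), tf.float32)
+    valid_mask = tf.constant([[True, True], [True, False]], tf.bool)
+    weights, values = filter_weight_value(weights, values, valid_mask)
+    # The invalid context element (batch item 1, index 1) has
+    # _NEGATIVE_PADDING_VALUE added to its weights and its values set to 0.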
+ """
+ w_batch_size, _, w_context_size = weights.shape
+ v_batch_size, v_context_size, _ = values.shape
+ m_batch_size, m_context_size = valid_mask.shape
+ if w_batch_size != v_batch_size or v_batch_size != m_batch_size:
+ raise ValueError("Please make sure the first dimension of the input"
+ " tensors are the same.")
+
+ if w_context_size != v_context_size:
+ raise ValueError("Please make sure the third dimension of weights matches"
+ " the second dimension of values.")
+
+ if w_context_size != m_context_size:
+ raise ValueError("Please make sure the third dimension of the weights"
+ " matches the second dimension of the valid_mask.")
+
+ valid_mask = valid_mask[..., tf.newaxis]
+
+ # Force the invalid weights to be very negative so it won't contribute to
+ # the softmax.
+ weights += tf.transpose(
+ tf.cast(tf.math.logical_not(valid_mask), weights.dtype) *
+ _NEGATIVE_PADDING_VALUE,
+ perm=[0, 2, 1])
+
+ # Force the invalid values to be 0.
+ values *= tf.cast(valid_mask, values.dtype)
+
+ return weights, values
+
+
+def compute_valid_mask(num_valid_elements, num_elements):
+ """Computes mask of valid entries within padded context feature.
+
+ Args:
+    num_valid_elements: An int32 Tensor of shape [batch_size].
+ num_elements: An int32 Tensor.
+
+ Returns:
+ A boolean Tensor of the shape [batch_size, num_elements]. True means
+ valid and False means invalid.
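+
+  Illustrative example (mirrors the accompanying unit test):
+    num_valid_elements = tf.constant([1, 2], tf.int32)
+    compute_valid_mask(num_valid_elements, num_elements=3)
+    # -> [[True, False, False],
+    #     [True, True, False]]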
+ """
+ batch_size = num_valid_elements.shape[0]
+ element_idxs = tf.range(num_elements, dtype=tf.int32)
+ batch_element_idxs = tf.tile(element_idxs[tf.newaxis, ...], [batch_size, 1])
+ num_valid_elements = num_valid_elements[..., tf.newaxis]
+ valid_mask = tf.less(batch_element_idxs, num_valid_elements)
+ return valid_mask
+
+
+def project_features(features, projection_dimension, is_training, normalize):
+ """Projects features to another feature space.
+
+ Args:
+ features: A float Tensor of shape [batch_size, features_size,
+ num_features].
+    projection_dimension: An int32 Tensor.
+ is_training: A boolean Tensor (affecting batch normalization).
+ normalize: A boolean Tensor. If true, the output features will be l2
+ normalized on the last dimension.
+
+ Returns:
+ A float Tensor of shape [batch, features_size, projection_dimension].
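+
+  Illustrative example (mirrors the accompanying unit test):
+    features = tf.ones([2, 3, 4], tf.float32)
+    projected = project_features(
+        features, projection_dimension=10, is_training=False, normalize=True)
+    # projected.shape == [2, 3, 10]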
+ """
+ # TODO(guanhangwu) Figure out a better way of specifying the batch norm
+ # params.
+ batch_norm_params = {
+ "is_training": is_training,
+ "decay": 0.97,
+ "epsilon": 0.001,
+ "center": True,
+ "scale": True
+ }
+
+ batch_size, _, num_features = features.shape
+ features = tf.reshape(features, [-1, num_features])
+ projected_features = slim.fully_connected(
+ features,
+ num_outputs=projection_dimension,
+ activation_fn=tf.nn.relu6,
+ normalizer_fn=slim.batch_norm,
+ normalizer_params=batch_norm_params)
+
+ projected_features = tf.reshape(projected_features,
+ [batch_size, -1, projection_dimension])
+
+ if normalize:
+ projected_features = tf.math.l2_normalize(projected_features, axis=-1)
+
+ return projected_features
+
+
+def attention_block(input_features, context_features, bottleneck_dimension,
+ output_dimension, attention_temperature, valid_mask,
+ is_training):
+ """Generic attention block.
+
+ Args:
+ input_features: A float Tensor of shape [batch_size, input_size,
+ num_input_features].
+ context_features: A float Tensor of shape [batch_size, context_size,
+ num_context_features].
+    bottleneck_dimension: An int32 Tensor representing the bottleneck
+      dimension for intermediate projections.
+    output_dimension: An int32 Tensor representing the last dimension of the
+      output feature.
+    attention_temperature: A float Tensor. It controls the temperature of the
+      softmax for weight calculation. The formula for the calculation is:
+      weights = exp(weights / temperature) / sum(exp(weights / temperature))
+ valid_mask: A boolean Tensor of shape [batch_size, context_size].
+ is_training: A boolean Tensor (affecting batch normalization).
+
+ Returns:
+ A float Tensor of shape [batch_size, input_size, output_dimension].
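+
+  Illustrative example (mirrors the accompanying unit test):
+    input_features = tf.ones([2, 3, 4], tf.float32)
+    context_features = tf.ones([2, 2, 3], tf.float32)
+    valid_mask = tf.constant([[True, True], [False, False]], tf.bool)
+    output = attention_block(
+        input_features, context_features, bottleneck_dimension=10,
+        output_dimension=20, attention_temperature=1.0, valid_mask=valid_mask,
+        is_training=False)
+    # output.shape == [2, 3, 20]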
+ """
+
+ with tf.variable_scope("AttentionBlock"):
+ queries = project_features(
+ input_features, bottleneck_dimension, is_training, normalize=True)
+ keys = project_features(
+ context_features, bottleneck_dimension, is_training, normalize=True)
+ values = project_features(
+ context_features, bottleneck_dimension, is_training, normalize=True)
+
+ weights = tf.matmul(queries, keys, transpose_b=True)
+
+ weights, values = filter_weight_value(weights, values, valid_mask)
+
+ weights = tf.nn.softmax(weights / attention_temperature)
+
+ features = tf.matmul(weights, values)
+ output_features = project_features(
+ features, output_dimension, is_training, normalize=False)
+ return output_features
+
+
+def compute_box_context_attention(box_features, context_features,
+ valid_context_size, bottleneck_dimension,
+ attention_temperature, is_training):
+ """Computes the attention feature from the context given a batch of box.
+
+ Args:
+    box_features: A float Tensor of shape [batch_size, max_num_proposals,
+      height, width, channels]. It contains the pooled features from the
+      first stage proposals.
+    context_features: A float Tensor of shape [batch_size, context_size,
+      num_context_features].
+    valid_context_size: An int32 Tensor of shape [batch_size].
+    bottleneck_dimension: An int32 Tensor representing the bottleneck
+      dimension for intermediate projections.
+    attention_temperature: A float Tensor. It controls the temperature of the
+      softmax for weight calculation. The formula for the calculation is:
+      weights = exp(weights / temperature) / sum(exp(weights / temperature))
+ is_training: A boolean Tensor (affecting batch normalization).
+
+ Returns:
+ A float Tensor of shape [batch_size, max_num_proposals, 1, 1, channels].
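+
+  Illustrative example (mirrors the accompanying unit test):
+    box_features = tf.ones([2, 3, 4, 4, 4], tf.float32)
+    context_features = tf.ones([2, 5, 6], tf.float32)
+    valid_context_size = tf.constant((2, 3), tf.int32)
+    attention = compute_box_context_attention(
+        box_features, context_features, valid_context_size,
+        bottleneck_dimension=10, attention_temperature=1.0, is_training=False)
+    # attention.shape == [2, 3, 1, 1, 4]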
+ """
+ _, context_size, _ = context_features.shape
+ valid_mask = compute_valid_mask(valid_context_size, context_size)
+
+ channels = box_features.shape[-1]
+ # Average pools over height and width dimension so that the shape of
+ # box_features becomes [batch_size, max_num_proposals, channels].
+ box_features = tf.reduce_mean(box_features, [2, 3])
+
+ output_features = attention_block(box_features, context_features,
+ bottleneck_dimension, channels.value,
+ attention_temperature, valid_mask,
+ is_training)
+
+ # Expands the dimension back to match with the original feature map.
+ output_features = output_features[:, :, tf.newaxis, tf.newaxis, :]
+
+ return output_features
diff --git a/research/object_detection/meta_architectures/context_rcnn_lib_tf1_test.py b/research/object_detection/meta_architectures/context_rcnn_lib_tf1_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..a0b3b848d835dcad37f6c75f05b869fbaec4facb
--- /dev/null
+++ b/research/object_detection/meta_architectures/context_rcnn_lib_tf1_test.py
@@ -0,0 +1,126 @@
+# Lint as: python3
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Tests for context_rcnn_lib."""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import unittest
+from absl.testing import parameterized
+import tensorflow.compat.v1 as tf
+
+from object_detection.meta_architectures import context_rcnn_lib
+from object_detection.utils import test_case
+from object_detection.utils import tf_version
+
+_NEGATIVE_PADDING_VALUE = -100000
+
+
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
+class ContextRcnnLibTest(parameterized.TestCase, test_case.TestCase,
+ tf.test.TestCase):
+ """Tests for the functions in context_rcnn_lib."""
+
+ def test_compute_valid_mask(self):
+ num_elements = tf.constant(3, tf.int32)
+    num_valid_elements = tf.constant((1, 2), tf.int32)
+    valid_mask = context_rcnn_lib.compute_valid_mask(num_valid_elements,
+                                                     num_elements)
+ expected_valid_mask = tf.constant([[1, 0, 0], [1, 1, 0]], tf.float32)
+ self.assertAllEqual(valid_mask, expected_valid_mask)
+
+ def test_filter_weight_value(self):
+ weights = tf.ones((2, 3, 2), tf.float32) * 4
+ values = tf.ones((2, 2, 4), tf.float32)
+ valid_mask = tf.constant([[True, True], [True, False]], tf.bool)
+
+ filtered_weights, filtered_values = context_rcnn_lib.filter_weight_value(
+ weights, values, valid_mask)
+ expected_weights = tf.constant([[[4, 4], [4, 4], [4, 4]],
+ [[4, _NEGATIVE_PADDING_VALUE + 4],
+ [4, _NEGATIVE_PADDING_VALUE + 4],
+ [4, _NEGATIVE_PADDING_VALUE + 4]]])
+
+ expected_values = tf.constant([[[1, 1, 1, 1], [1, 1, 1, 1]],
+ [[1, 1, 1, 1], [0, 0, 0, 0]]])
+ self.assertAllEqual(filtered_weights, expected_weights)
+ self.assertAllEqual(filtered_values, expected_values)
+
+ # Changes the valid_mask so the results will be different.
+ valid_mask = tf.constant([[True, True], [False, False]], tf.bool)
+
+ filtered_weights, filtered_values = context_rcnn_lib.filter_weight_value(
+ weights, values, valid_mask)
+ expected_weights = tf.constant(
+ [[[4, 4], [4, 4], [4, 4]],
+ [[_NEGATIVE_PADDING_VALUE + 4, _NEGATIVE_PADDING_VALUE + 4],
+ [_NEGATIVE_PADDING_VALUE + 4, _NEGATIVE_PADDING_VALUE + 4],
+ [_NEGATIVE_PADDING_VALUE + 4, _NEGATIVE_PADDING_VALUE + 4]]])
+
+ expected_values = tf.constant([[[1, 1, 1, 1], [1, 1, 1, 1]],
+ [[0, 0, 0, 0], [0, 0, 0, 0]]])
+ self.assertAllEqual(filtered_weights, expected_weights)
+ self.assertAllEqual(filtered_values, expected_values)
+
+ @parameterized.parameters((2, True, True), (2, False, True),
+ (10, True, False), (10, False, False))
+ def test_project_features(self, projection_dimension, is_training, normalize):
+ features = tf.ones([2, 3, 4], tf.float32)
+ projected_features = context_rcnn_lib.project_features(
+ features,
+ projection_dimension,
+ is_training=is_training,
+ normalize=normalize)
+
+ # Makes sure the shape is correct.
+ self.assertAllEqual(projected_features.shape, [2, 3, projection_dimension])
+
+ @parameterized.parameters(
+ (2, 10, 1),
+ (3, 10, 2),
+ (4, 20, 3),
+ (5, 20, 4),
+ (7, 20, 5),
+ )
+ def test_attention_block(self, bottleneck_dimension, output_dimension,
+ attention_temperature):
+ input_features = tf.ones([2, 3, 4], tf.float32)
+ context_features = tf.ones([2, 2, 3], tf.float32)
+ valid_mask = tf.constant([[True, True], [False, False]], tf.bool)
+ is_training = False
+ output_features = context_rcnn_lib.attention_block(
+ input_features, context_features, bottleneck_dimension,
+ output_dimension, attention_temperature, valid_mask, is_training)
+
+ # Makes sure the shape is correct.
+ self.assertAllEqual(output_features.shape, [2, 3, output_dimension])
+
+ @parameterized.parameters(True, False)
+ def test_compute_box_context_attention(self, is_training):
+ box_features = tf.ones([2, 3, 4, 4, 4], tf.float32)
+ context_features = tf.ones([2, 5, 6], tf.float32)
+ valid_context_size = tf.constant((2, 3), tf.int32)
+ bottleneck_dimension = 10
+ attention_temperature = 1
+ attention_features = context_rcnn_lib.compute_box_context_attention(
+ box_features, context_features, valid_context_size,
+ bottleneck_dimension, attention_temperature, is_training)
+ # Makes sure the shape is correct.
+ self.assertAllEqual(attention_features.shape, [2, 3, 1, 1, 4])
+
+
+if __name__ == '__main__':
+ tf.test.main()
diff --git a/research/object_detection/meta_architectures/context_rcnn_meta_arch.py b/research/object_detection/meta_architectures/context_rcnn_meta_arch.py
new file mode 100644
index 0000000000000000000000000000000000000000..abe30558b01218df8999b3f0f7698e57f67f8ff2
--- /dev/null
+++ b/research/object_detection/meta_architectures/context_rcnn_meta_arch.py
@@ -0,0 +1,340 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Context R-CNN meta-architecture definition.
+
+This adds the ability to attend to contextual features within the Faster R-CNN
+object detection framework to improve object detection performance.
+See https://arxiv.org/abs/1912.03538 for more information.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import functools
+
+from object_detection.core import standard_fields as fields
+from object_detection.meta_architectures import context_rcnn_lib
+from object_detection.meta_architectures import faster_rcnn_meta_arch
+
+
+class ContextRCNNMetaArch(faster_rcnn_meta_arch.FasterRCNNMetaArch):
+ """Context R-CNN Meta-architecture definition."""
+
+ def __init__(self,
+ is_training,
+ num_classes,
+ image_resizer_fn,
+ feature_extractor,
+ number_of_stages,
+ first_stage_anchor_generator,
+ first_stage_target_assigner,
+ first_stage_atrous_rate,
+ first_stage_box_predictor_arg_scope_fn,
+ first_stage_box_predictor_kernel_size,
+ first_stage_box_predictor_depth,
+ first_stage_minibatch_size,
+ first_stage_sampler,
+ first_stage_non_max_suppression_fn,
+ first_stage_max_proposals,
+ first_stage_localization_loss_weight,
+ first_stage_objectness_loss_weight,
+ crop_and_resize_fn,
+ initial_crop_size,
+ maxpool_kernel_size,
+ maxpool_stride,
+ second_stage_target_assigner,
+ second_stage_mask_rcnn_box_predictor,
+ second_stage_batch_size,
+ second_stage_sampler,
+ second_stage_non_max_suppression_fn,
+ second_stage_score_conversion_fn,
+ second_stage_localization_loss_weight,
+ second_stage_classification_loss_weight,
+ second_stage_classification_loss,
+ second_stage_mask_prediction_loss_weight=1.0,
+ hard_example_miner=None,
+ parallel_iterations=16,
+ add_summaries=True,
+ clip_anchors_to_image=False,
+ use_static_shapes=False,
+ resize_masks=True,
+ freeze_batchnorm=False,
+ return_raw_detections_during_predict=False,
+ output_final_box_features=False,
+ attention_bottleneck_dimension=None,
+ attention_temperature=None):
+ """ContextRCNNMetaArch Constructor.
+
+ Args:
+ is_training: A boolean indicating whether the training version of the
+ computation graph should be constructed.
+ num_classes: Number of classes. Note that num_classes *does not*
+ include the background category, so if groundtruth labels take values
+ in {0, 1, .., K-1}, num_classes=K (and not K+1, even though the
+ assigned classification targets can range from {0,... K}).
+ image_resizer_fn: A callable for image resizing. This callable
+ takes a rank-3 image tensor of shape [height, width, channels]
+ (corresponding to a single image), an optional rank-3 instance mask
+ tensor of shape [num_masks, height, width] and returns a resized rank-3
+ image tensor, a resized mask tensor if one was provided in the input. In
+ addition this callable must also return a 1-D tensor of the form
+ [height, width, channels] containing the size of the true image, as the
+ image resizer can perform zero padding. See protos/image_resizer.proto.
+ feature_extractor: A FasterRCNNFeatureExtractor object.
+      number_of_stages: An integer taking values in {1, 2, 3}. If
+ 1, the function will construct only the Region Proposal Network (RPN)
+ part of the model. If 2, the function will perform box refinement and
+ other auxiliary predictions all in the second stage. If 3, it will
+ extract features from refined boxes and perform the auxiliary
+ predictions on the non-maximum suppressed refined boxes.
+ If is_training is true and the value of number_of_stages is 3, it is
+ reduced to 2 since all the model heads are trained in parallel in second
+ stage during training.
+ first_stage_anchor_generator: An anchor_generator.AnchorGenerator object
+ (note that currently we only support
+ grid_anchor_generator.GridAnchorGenerator objects)
+ first_stage_target_assigner: Target assigner to use for first stage of
+ Faster R-CNN (RPN).
+ first_stage_atrous_rate: A single integer indicating the atrous rate for
+ the single convolution op which is applied to the `rpn_features_to_crop`
+ tensor to obtain a tensor to be used for box prediction. Some feature
+ extractors optionally allow for producing feature maps computed at
+ denser resolutions. The atrous rate is used to compensate for the
+ denser feature maps by using an effectively larger receptive field.
+ (This should typically be set to 1).
+ first_stage_box_predictor_arg_scope_fn: Either a
+ Keras layer hyperparams object or a function to construct tf-slim
+ arg_scope for conv2d, separable_conv2d and fully_connected ops. Used
+ for the RPN box predictor. If it is a keras hyperparams object the
+ RPN box predictor will be a Keras model. If it is a function to
+ construct an arg scope it will be a tf-slim box predictor.
+ first_stage_box_predictor_kernel_size: Kernel size to use for the
+ convolution op just prior to RPN box predictions.
+ first_stage_box_predictor_depth: Output depth for the convolution op
+ just prior to RPN box predictions.
+ first_stage_minibatch_size: The "batch size" to use for computing the
+ objectness and location loss of the region proposal network. This
+ "batch size" refers to the number of anchors selected as contributing
+ to the loss function for any given image within the image batch and is
+ only called "batch_size" due to terminology from the Faster R-CNN paper.
+ first_stage_sampler: Sampler to use for first stage loss (RPN loss).
+ first_stage_non_max_suppression_fn: batch_multiclass_non_max_suppression
+ callable that takes `boxes`, `scores` and optional `clip_window`(with
+ all other inputs already set) and returns a dictionary containing
+ tensors with keys: `detection_boxes`, `detection_scores`,
+ `detection_classes`, `num_detections`. This is used to perform non max
+ suppression on the boxes predicted by the Region Proposal Network
+ (RPN).
+ See `post_processing.batch_multiclass_non_max_suppression` for the type
+ and shape of these tensors.
+ first_stage_max_proposals: Maximum number of boxes to retain after
+ performing Non-Max Suppression (NMS) on the boxes predicted by the
+ Region Proposal Network (RPN).
+      first_stage_localization_loss_weight: A float indicating the scale
+        factor for the first stage localization loss.
+      first_stage_objectness_loss_weight: A float indicating the scale factor
+        for the first stage objectness loss.
+ crop_and_resize_fn: A differentiable resampler to use for cropping RPN
+ proposal features.
+ initial_crop_size: A single integer indicating the output size
+ (width and height are set to be the same) of the initial bilinear
+ interpolation based cropping during ROI pooling.
+ maxpool_kernel_size: A single integer indicating the kernel size of the
+ max pool op on the cropped feature map during ROI pooling.
+ maxpool_stride: A single integer indicating the stride of the max pool
+ op on the cropped feature map during ROI pooling.
+ second_stage_target_assigner: Target assigner to use for second stage of
+ Faster R-CNN. If the model is configured with multiple prediction heads,
+ this target assigner is used to generate targets for all heads (with the
+ correct `unmatched_class_label`).
+ second_stage_mask_rcnn_box_predictor: Mask R-CNN box predictor to use for
+ the second stage.
+ second_stage_batch_size: The batch size used for computing the
+ classification and refined location loss of the box classifier. This
+ "batch size" refers to the number of proposals selected as contributing
+ to the loss function for any given image within the image batch and is
+ only called "batch_size" due to terminology from the Faster R-CNN paper.
+ second_stage_sampler: Sampler to use for second stage loss (box
+ classifier loss).
+ second_stage_non_max_suppression_fn: batch_multiclass_non_max_suppression
+ callable that takes `boxes`, `scores`, optional `clip_window` and
+ optional (kwarg) `mask` inputs (with all other inputs already set)
+ and returns a dictionary containing tensors with keys:
+ `detection_boxes`, `detection_scores`, `detection_classes`,
+ `num_detections`, and (optionally) `detection_masks`. See
+ `post_processing.batch_multiclass_non_max_suppression` for the type and
+ shape of these tensors.
+ second_stage_score_conversion_fn: Callable elementwise nonlinearity
+ (that takes tensors as inputs and returns tensors). This is usually
+ used to convert logits to probabilities.
+ second_stage_localization_loss_weight: A float indicating the scale factor
+ for second stage localization loss.
+ second_stage_classification_loss_weight: A float indicating the scale
+ factor for second stage classification loss.
+ second_stage_classification_loss: Classification loss used by the second
+ stage classifier. Either losses.WeightedSigmoidClassificationLoss or
+ losses.WeightedSoftmaxClassificationLoss.
+ second_stage_mask_prediction_loss_weight: A float indicating the scale
+ factor for second stage mask prediction loss. This is applicable only if
+ second stage box predictor is configured to predict masks.
+ hard_example_miner: A losses.HardExampleMiner object (can be None).
+ parallel_iterations: (Optional) The number of iterations allowed to run
+ in parallel for calls to tf.map_fn.
+ add_summaries: boolean (default: True) controlling whether summary ops
+ should be added to tensorflow graph.
+ clip_anchors_to_image: Normally, anchors generated for a given image size
+ are pruned during training if they lie outside the image window. This
+ option clips the anchors to be within the image instead of pruning.
+ use_static_shapes: If True, uses implementation of ops with static shape
+ guarantees.
+      resize_masks: Indicates whether the masks present in the groundtruth
+        should be resized in the model with `image_resizer_fn`.
+ freeze_batchnorm: Whether to freeze batch norm parameters in the first
+ stage box predictor during training or not. When training with a small
+ batch size (e.g. 1), it is desirable to freeze batch norm update and
+ use pretrained batch norm params.
+ return_raw_detections_during_predict: Whether to return raw detection
+ boxes in the predict() method. These are decoded boxes that have not
+ been through postprocessing (i.e. NMS). Default False.
+      output_final_box_features: Whether to output final box features. If true,
+        it crops the feature map based on the final box predictions and returns
+        them in the dict as detection_features.
+ attention_bottleneck_dimension: A single integer. The bottleneck feature
+ dimension of the attention block.
+ attention_temperature: A single float. The attention temperature.
+
+ Raises:
+ ValueError: If `second_stage_batch_size` > `first_stage_max_proposals` at
+ training time.
+ ValueError: If first_stage_anchor_generator is not of type
+ grid_anchor_generator.GridAnchorGenerator.
+ """
+ super(ContextRCNNMetaArch, self).__init__(
+ is_training,
+ num_classes,
+ image_resizer_fn,
+ feature_extractor,
+ number_of_stages,
+ first_stage_anchor_generator,
+ first_stage_target_assigner,
+ first_stage_atrous_rate,
+ first_stage_box_predictor_arg_scope_fn,
+ first_stage_box_predictor_kernel_size,
+ first_stage_box_predictor_depth,
+ first_stage_minibatch_size,
+ first_stage_sampler,
+ first_stage_non_max_suppression_fn,
+ first_stage_max_proposals,
+ first_stage_localization_loss_weight,
+ first_stage_objectness_loss_weight,
+ crop_and_resize_fn,
+ initial_crop_size,
+ maxpool_kernel_size,
+ maxpool_stride,
+ second_stage_target_assigner,
+ second_stage_mask_rcnn_box_predictor,
+ second_stage_batch_size,
+ second_stage_sampler,
+ second_stage_non_max_suppression_fn,
+ second_stage_score_conversion_fn,
+ second_stage_localization_loss_weight,
+ second_stage_classification_loss_weight,
+ second_stage_classification_loss,
+ second_stage_mask_prediction_loss_weight=(
+ second_stage_mask_prediction_loss_weight),
+ hard_example_miner=hard_example_miner,
+ parallel_iterations=parallel_iterations,
+ add_summaries=add_summaries,
+ clip_anchors_to_image=clip_anchors_to_image,
+ use_static_shapes=use_static_shapes,
+ resize_masks=resize_masks,
+ freeze_batchnorm=freeze_batchnorm,
+ return_raw_detections_during_predict=(
+ return_raw_detections_during_predict),
+ output_final_box_features=output_final_box_features)
+
+ self._context_feature_extract_fn = functools.partial(
+ context_rcnn_lib.compute_box_context_attention,
+ bottleneck_dimension=attention_bottleneck_dimension,
+ attention_temperature=attention_temperature,
+ is_training=is_training)
+
+ @staticmethod
+ def get_side_inputs(features):
+ """Overrides the get_side_inputs function in the base class.
+
+ This function returns context_features and valid_context_size, which will be
+ used in the _compute_second_stage_input_feature_maps function.
+
+ Args:
+ features: A dictionary of tensors.
+
+ Returns:
+      A dictionary of tensors containing context_features and
+      valid_context_size.
+
+ Raises:
+ ValueError: If context_features or valid_context_size is not in the
+ features.
+ """
+ if (fields.InputDataFields.context_features not in features or
+ fields.InputDataFields.valid_context_size not in features):
+ raise ValueError(
+ "Please make sure context_features and valid_context_size are in the "
+ "features")
+
+ return {
+ fields.InputDataFields.context_features:
+ features[fields.InputDataFields.context_features],
+ fields.InputDataFields.valid_context_size:
+ features[fields.InputDataFields.valid_context_size]
+ }
+
+ def _compute_second_stage_input_feature_maps(self, features_to_crop,
+ proposal_boxes_normalized,
+ context_features,
+ valid_context_size):
+ """Crops to a set of proposals from the feature map for a batch of images.
+
+ This function overrides the one in the FasterRCNNMetaArch. Aside from
+ cropping and resizing the feature maps, which is done in the parent class,
+ it adds context attention features to the box features.
+
+ Args:
+ features_to_crop: A float32 Tensor with shape [batch_size, height, width,
+ depth]
+ proposal_boxes_normalized: A float32 Tensor with shape [batch_size,
+ num_proposals, box_code_size] containing proposal boxes in normalized
+ coordinates.
+ context_features: A float Tensor of shape [batch_size, context_size,
+ num_context_features].
+ valid_context_size: A int32 Tensor of shape [batch_size].
+
+ Returns:
+ A float32 Tensor with shape [K, new_height, new_width, depth].
+ """
+ box_features = self._crop_and_resize_fn(
+ features_to_crop, proposal_boxes_normalized,
+ [self._initial_crop_size, self._initial_crop_size])
+
+ attention_features = self._context_feature_extract_fn(
+ box_features=box_features,
+ context_features=context_features,
+ valid_context_size=valid_context_size)
+
+ # Adds box features with attention features.
+ box_features += attention_features
+
+ flattened_feature_maps = self._flatten_first_two_dimensions(box_features)
+
+ return self._maxpool_layer(flattened_feature_maps)
diff --git a/research/object_detection/meta_architectures/context_rcnn_meta_arch_tf1_test.py b/research/object_detection/meta_architectures/context_rcnn_meta_arch_tf1_test.py
index 47d7624d02f1329dd92b2cfe0d97e5522369bc6f..a5dc8cc8e12f8e2ee95465c651b3570db0cca80f 100644
--- a/research/object_detection/meta_architectures/context_rcnn_meta_arch_tf1_test.py
+++ b/research/object_detection/meta_architectures/context_rcnn_meta_arch_tf1_test.py
@@ -1,4 +1,4 @@
-# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,14 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
-"""Tests for google3.third_party.tensorflow_models.object_detection.meta_architectures.context_meta_arch."""
+"""Tests for object_detection.meta_architectures.context_meta_arch."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
-
+import unittest
from absl.testing import parameterized
import mock
import tensorflow.compat.v1 as tf
@@ -109,6 +109,7 @@ class FakeFasterRCNNKerasFeatureExtractor(
])
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class ContextRCNNMetaArchTest(test_case.TestCase, parameterized.TestCase):
def _get_model(self, box_predictor, **common_kwargs):
diff --git a/research/object_detection/meta_architectures/faster_rcnn_meta_arch.py b/research/object_detection/meta_architectures/faster_rcnn_meta_arch.py
index 58e3664553ccb57e943f34b8d38919aab9c83309..a07ddd09a63dd32bf43e6c5523cd3f263dda365e 100644
--- a/research/object_detection/meta_architectures/faster_rcnn_meta_arch.py
+++ b/research/object_detection/meta_architectures/faster_rcnn_meta_arch.py
@@ -261,31 +261,6 @@ class FasterRCNNKerasFeatureExtractor(object):
"""Get model that extracts second stage box classifier features."""
pass
- def restore_from_classification_checkpoint_fn(
- self,
- first_stage_feature_extractor_scope,
- second_stage_feature_extractor_scope):
- """Returns a map of variables to load from a foreign checkpoint.
-
- Args:
- first_stage_feature_extractor_scope: A scope name for the first stage
- feature extractor.
- second_stage_feature_extractor_scope: A scope name for the second stage
- feature extractor.
-
- Returns:
- A dict mapping variable names (to load from a checkpoint) to variables in
- the model graph.
- """
- variables_to_restore = {}
- for variable in variables_helper.get_global_variables_safely():
- for scope_name in [first_stage_feature_extractor_scope,
- second_stage_feature_extractor_scope]:
- if variable.op.name.startswith(scope_name):
- var_name = variable.op.name.replace(scope_name + '/', '')
- variables_to_restore[var_name] = variable
- return variables_to_restore
-
class FasterRCNNMetaArch(model.DetectionModel):
"""Faster R-CNN Meta-architecture definition."""
@@ -2808,6 +2783,46 @@ class FasterRCNNMetaArch(model.DetectionModel):
variables_to_restore, include_patterns=include_patterns)
return {var.op.name: var for var in feature_extractor_variables}
+ def restore_from_objects(self, fine_tune_checkpoint_type='detection'):
+ """Returns a map of Trackable objects to load from a foreign checkpoint.
+
+ Returns a dictionary of Tensorflow 2 Trackable objects (e.g. tf.Module
+ or Checkpoint). This enables the model to initialize based on weights from
+ another task. For example, the feature extractor variables from a
+ classification model can be used to bootstrap training of an object
+ detector. When loading from an object detection model, the checkpoint model
+    should have the same parameters as this detection model with the exception
+    of the num_classes parameter.
+
+ Note that this function is intended to be used to restore Keras-based
+ models when running Tensorflow 2, whereas restore_map (above) is intended
+ to be used to restore Slim-based models when running Tensorflow 1.x.
+
+ Args:
+ fine_tune_checkpoint_type: whether to restore from a full detection
+ checkpoint (with compatible variable names) or to restore from a
+ classification checkpoint for initialization prior to training.
+ Valid values: `detection`, `classification`. Default 'detection'.
+
+ Returns:
+ A dict mapping keys to Trackable objects (tf.Module or Checkpoint).
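+
+    Illustrative usage (the surrounding training loop and checkpoint path are
+    assumed, not defined here):
+      restore_map = model.restore_from_objects('classification')
+      ckpt = tf.train.Checkpoint(**restore_map)
+      ckpt.restore(classification_checkpoint_path).expect_partial()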
+ """
+ if fine_tune_checkpoint_type == 'classification':
+ return {
+ 'feature_extractor':
+ self._feature_extractor.classification_backbone
+ }
+ elif fine_tune_checkpoint_type == 'detection':
+ fake_model = tf.train.Checkpoint(
+ _feature_extractor_for_box_classifier_features=
+ self._feature_extractor_for_box_classifier_features,
+ _feature_extractor_for_proposal_features=
+ self._feature_extractor_for_proposal_features)
+ return {'model': fake_model}
+ else:
+ raise ValueError('Not supported fine_tune_checkpoint_type: {}'.format(
+ fine_tune_checkpoint_type))
+
def updates(self):
"""Returns a list of update operators for this model.
diff --git a/research/object_detection/meta_architectures/ssd_meta_arch.py b/research/object_detection/meta_architectures/ssd_meta_arch.py
index d401b0de75a6a1c04984caad12986029e3166226..d5db202a8a5effc581f7e200dc49a7811e7a3d95 100644
--- a/research/object_detection/meta_architectures/ssd_meta_arch.py
+++ b/research/object_detection/meta_architectures/ssd_meta_arch.py
@@ -250,35 +250,6 @@ class SSDKerasFeatureExtractor(tf.keras.Model):
def call(self, inputs, **kwargs):
return self._extract_features(inputs)
- def restore_from_classification_checkpoint_fn(self, feature_extractor_scope):
- """Returns a map of variables to load from a foreign checkpoint.
-
- Args:
- feature_extractor_scope: A scope name for the feature extractor.
-
- Returns:
- A dict mapping variable names (to load from a checkpoint) to variables in
- the model graph.
- """
- variables_to_restore = {}
- if tf.executing_eagerly():
- for variable in self.variables:
- # variable.name includes ":0" at the end, but the names in the
- # checkpoint do not have the suffix ":0". So, we strip it here.
- var_name = variable.name[:-2]
- if var_name.startswith(feature_extractor_scope + '/'):
- var_name = var_name.replace(feature_extractor_scope + '/', '')
- variables_to_restore[var_name] = variable
- else:
- # b/137854499: use global_variables.
- for variable in variables_helper.get_global_variables_safely():
- var_name = variable.op.name
- if var_name.startswith(feature_extractor_scope + '/'):
- var_name = var_name.replace(feature_extractor_scope + '/', '')
- variables_to_restore[var_name] = variable
-
- return variables_to_restore
-
class SSDMetaArch(model.DetectionModel):
"""SSD Meta-architecture definition."""
@@ -1295,8 +1266,8 @@ class SSDMetaArch(model.DetectionModel):
classification checkpoint for initialization prior to training.
Valid values: `detection`, `classification`. Default 'detection'.
load_all_detection_checkpoint_vars: whether to load all variables (when
- `fine_tune_checkpoint_type='detection'`). If False, only variables
- within the appropriate scopes are included. Default False.
+ `fine_tune_checkpoint_type` is `detection`). If False, only variables
+ within the feature extractor scope are included. Default False.
Returns:
A dict mapping variable names (to load from a checkpoint) to variables in
@@ -1311,36 +1282,56 @@ class SSDMetaArch(model.DetectionModel):
elif fine_tune_checkpoint_type == 'detection':
variables_to_restore = {}
- if tf.executing_eagerly():
+ for variable in variables_helper.get_global_variables_safely():
+ var_name = variable.op.name
if load_all_detection_checkpoint_vars:
- # Grab all detection vars by name
- for variable in self.variables:
- # variable.name includes ":0" at the end, but the names in the
- # checkpoint do not have the suffix ":0". So, we strip it here.
- var_name = variable.name[:-2]
- variables_to_restore[var_name] = variable
+ variables_to_restore[var_name] = variable
else:
- # Grab just the feature extractor vars by name
- for variable in self._feature_extractor.variables:
- # variable.name includes ":0" at the end, but the names in the
- # checkpoint do not have the suffix ":0". So, we strip it here.
- var_name = variable.name[:-2]
- variables_to_restore[var_name] = variable
- else:
- for variable in variables_helper.get_global_variables_safely():
- var_name = variable.op.name
- if load_all_detection_checkpoint_vars:
+ if var_name.startswith(self._extract_features_scope):
variables_to_restore[var_name] = variable
- else:
- if var_name.startswith(self._extract_features_scope):
- variables_to_restore[var_name] = variable
-
return variables_to_restore
else:
raise ValueError('Not supported fine_tune_checkpoint_type: {}'.format(
fine_tune_checkpoint_type))
+ def restore_from_objects(self, fine_tune_checkpoint_type='detection'):
+ """Returns a map of Trackable objects to load from a foreign checkpoint.
+
+ Returns a dictionary of Tensorflow 2 Trackable objects (e.g. tf.Module
+ or Checkpoint). This enables the model to initialize based on weights from
+ another task. For example, the feature extractor variables from a
+ classification model can be used to bootstrap training of an object
+ detector. When loading from an object detection model, the checkpoint model
+    should have the same parameters as this detection model with the exception
+    of the num_classes parameter.
+
+ Note that this function is intended to be used to restore Keras-based
+ models when running Tensorflow 2, whereas restore_map (above) is intended
+ to be used to restore Slim-based models when running Tensorflow 1.x.
+
+ Args:
+ fine_tune_checkpoint_type: whether to restore from a full detection
+ checkpoint (with compatible variable names) or to restore from a
+ classification checkpoint for initialization prior to training.
+ Valid values: `detection`, `classification`. Default 'detection'.
+
+ Returns:
+ A dict mapping keys to Trackable objects (tf.Module or Checkpoint).
+ """
+ if fine_tune_checkpoint_type == 'classification':
+ return {
+ 'feature_extractor':
+ self._feature_extractor.classification_backbone
+ }
+ elif fine_tune_checkpoint_type == 'detection':
+ fake_model = tf.train.Checkpoint(
+ _feature_extractor=self._feature_extractor)
+ return {'model': fake_model}
+ else:
+ raise ValueError('Not supported fine_tune_checkpoint_type: {}'.format(
+ fine_tune_checkpoint_type))
+
def updates(self):
"""Returns a list of update operators for this model.
diff --git a/research/object_detection/metrics/calibration_evaluation_test.py b/research/object_detection/metrics/calibration_evaluation_tf1_test.py
similarity index 98%
rename from research/object_detection/metrics/calibration_evaluation_test.py
rename to research/object_detection/metrics/calibration_evaluation_tf1_test.py
index 375978d86c9f4b537e212ec3a909a3fe6016495d..0f3d6eb319f0819937c04e030c9e1937bf09db10 100644
--- a/research/object_detection/metrics/calibration_evaluation_test.py
+++ b/research/object_detection/metrics/calibration_evaluation_tf1_test.py
@@ -18,9 +18,11 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+import unittest
import tensorflow.compat.v1 as tf
from object_detection.core import standard_fields
from object_detection.metrics import calibration_evaluation
+from object_detection.utils import tf_version
def _get_categories_list():
@@ -36,6 +38,7 @@ def _get_categories_list():
}]
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class CalibrationDetectionEvaluationTest(tf.test.TestCase):
def _get_ece(self, ece_op, update_op):
diff --git a/research/object_detection/metrics/calibration_metrics_test.py b/research/object_detection/metrics/calibration_metrics_tf1_test.py
similarity index 97%
rename from research/object_detection/metrics/calibration_metrics_test.py
rename to research/object_detection/metrics/calibration_metrics_tf1_test.py
index 54793fca09c464eec31149bccff31cbb6f83f4cf..9c1adbca20dfae80e97927d462c9cc18de6ff823 100644
--- a/research/object_detection/metrics/calibration_metrics_test.py
+++ b/research/object_detection/metrics/calibration_metrics_tf1_test.py
@@ -18,11 +18,14 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.metrics import calibration_metrics
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class CalibrationLibTest(tf.test.TestCase):
@staticmethod
diff --git a/research/object_detection/metrics/coco_evaluation.py b/research/object_detection/metrics/coco_evaluation.py
index 7a962457bd2a6690be6bae10342a62c6705db781..3ecfddb0dd4221c3e511fab628b884bc5eb514e6 100644
--- a/research/object_detection/metrics/coco_evaluation.py
+++ b/research/object_detection/metrics/coco_evaluation.py
@@ -24,6 +24,7 @@ import tensorflow.compat.v1 as tf
from object_detection.core import standard_fields
from object_detection.metrics import coco_tools
from object_detection.utils import json_utils
+from object_detection.utils import np_mask_ops
from object_detection.utils import object_detection_evaluation
@@ -1263,3 +1264,535 @@ class CocoMaskEvaluator(object_detection_evaluation.DetectionEvaluator):
eval_metric_ops[metric_name] = (tf.py_func(
value_func_factory(metric_name), [], np.float32), update_op)
return eval_metric_ops
+
+
+class CocoPanopticSegmentationEvaluator(
+ object_detection_evaluation.DetectionEvaluator):
+ """Class to evaluate PQ (panoptic quality) metric on COCO dataset.
+
+ More details about this metric: https://arxiv.org/pdf/1801.00868.pdf.
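+
+  As defined in that paper, given matched (predicted, groundtruth) segment
+  pairs TP, unmatched predictions FP and unmatched groundtruths FN:
+    PQ = (sum of IoU over matched pairs) / (|TP| + 0.5 * |FP| + 0.5 * |FN|)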
+ """
+
+ def __init__(self,
+ categories,
+ include_metrics_per_category=False,
+ iou_threshold=0.5,
+ ioa_threshold=0.5):
+ """Constructor.
+
+ Args:
+ categories: A list of dicts, each of which has the following keys -
+ 'id': (required) an integer id uniquely identifying this category.
+ 'name': (required) string representing category name e.g., 'cat', 'dog'.
+ include_metrics_per_category: If True, include metrics for each category.
+ iou_threshold: intersection-over-union threshold for mask matching (with
+ normal groundtruths).
+ ioa_threshold: intersection-over-area threshold for mask matching with
+ "is_crowd" groundtruths.
+ """
+ super(CocoPanopticSegmentationEvaluator, self).__init__(categories)
+ self._groundtruth_masks = {}
+ self._groundtruth_class_labels = {}
+ self._groundtruth_is_crowd = {}
+ self._predicted_masks = {}
+ self._predicted_class_labels = {}
+ self._include_metrics_per_category = include_metrics_per_category
+ self._iou_threshold = iou_threshold
+ self._ioa_threshold = ioa_threshold
+
+ def clear(self):
+ """Clears the state to prepare for a fresh evaluation."""
+ self._groundtruth_masks.clear()
+ self._groundtruth_class_labels.clear()
+ self._groundtruth_is_crowd.clear()
+ self._predicted_masks.clear()
+ self._predicted_class_labels.clear()
+
+ def add_single_ground_truth_image_info(self, image_id, groundtruth_dict):
+ """Adds groundtruth for a single image to be used for evaluation.
+
+ If the image has already been added, a warning is logged, and groundtruth is
+ ignored.
+
+ Args:
+ image_id: A unique string/integer identifier for the image.
+ groundtruth_dict: A dictionary containing -
+ InputDataFields.groundtruth_classes: integer numpy array of shape
+ [num_masks] containing 1-indexed groundtruth classes for the mask.
+ InputDataFields.groundtruth_instance_masks: uint8 numpy array of shape
+ [num_masks, image_height, image_width] containing groundtruth masks.
+ The elements of the array must be in {0, 1}.
+ InputDataFields.groundtruth_is_crowd (optional): integer numpy array of
+ shape [num_masks] containing the is_crowd flag for each groundtruth mask.
+ """
+
+ if image_id in self._groundtruth_masks:
+ tf.logging.warning(
+ 'Ignoring groundtruth with image %s, since it has already been '
+ 'added to the ground truth database.', image_id)
+ return
+
+ self._groundtruth_masks[image_id] = groundtruth_dict[
+ standard_fields.InputDataFields.groundtruth_instance_masks]
+ self._groundtruth_class_labels[image_id] = groundtruth_dict[
+ standard_fields.InputDataFields.groundtruth_classes]
+ groundtruth_is_crowd = groundtruth_dict.get(
+ standard_fields.InputDataFields.groundtruth_is_crowd)
+ # Drop groundtruth_is_crowd if empty tensor.
+ if groundtruth_is_crowd is not None and not groundtruth_is_crowd.size > 0:
+ groundtruth_is_crowd = None
+ if groundtruth_is_crowd is not None:
+ self._groundtruth_is_crowd[image_id] = groundtruth_is_crowd
+
+ def add_single_detected_image_info(self, image_id, detections_dict):
+ """Adds detections for a single image to be used for evaluation.
+
+ If a detection has already been added for this image id, a warning is
+ logged, and the detection is skipped.
+
+ Args:
+ image_id: A unique string/integer identifier for the image.
+ detections_dict: A dictionary containing -
+ DetectionResultFields.detection_classes: integer numpy array of shape
+ [num_masks] containing 1-indexed detection classes for the masks.
+ DetectionResultFields.detection_masks: optional uint8 numpy array of
+ shape [num_masks, image_height, image_width] containing instance
+ masks. The elements of the array must be in {0, 1}.
+
+ Raises:
+ ValueError: If results and groundtruth shape don't match.
+ """
+
+ if image_id not in self._groundtruth_masks:
+ raise ValueError('Missing groundtruth for image id: {}'.format(image_id))
+
+ detection_masks = detections_dict[
+ standard_fields.DetectionResultFields.detection_masks]
+ self._predicted_masks[image_id] = detection_masks
+ self._predicted_class_labels[image_id] = detections_dict[
+ standard_fields.DetectionResultFields.detection_classes]
+ groundtruth_mask_shape = self._groundtruth_masks[image_id].shape
+ if groundtruth_mask_shape[1:] != detection_masks.shape[1:]:
+ raise ValueError("The shape of results doesn't match groundtruth.")
+
+ def evaluate(self):
+ """Evaluates the detection masks and returns a dictionary of coco metrics.
+
+ Returns:
+ A dictionary holding -
+
+ 1. summary_metric:
+ 'PanopticQuality@%.2fIOU': mean panoptic quality averaged over classes at
+ the required IOU.
+ 'SegmentationQuality@%.2fIOU': mean segmentation quality averaged over
+ classes at the required IOU.
+ 'RecognitionQuality@%.2fIOU': mean recognition quality averaged over
+ classes at the required IOU.
+ 'NumValidClasses': number of valid classes. A valid class should have at
+ least one normal (is_crowd=0) groundtruth mask or one predicted mask.
+ 'NumTotalClasses': number of total classes.
+
+ 2. per_category_pq: if include_metrics_per_category is True, category
+ specific results with keys of the form:
+ 'PanopticQuality@%.2fIOU_ByCategory/category'.
+ """
+ # Evaluate and accumulate the iou/tp/fp/fn.
+ sum_tp_iou, sum_num_tp, sum_num_fp, sum_num_fn = self._evaluate_all_masks()
+ # Compute PQ metric for each category and average over all classes.
+ mask_metrics = self._compute_panoptic_metrics(sum_tp_iou, sum_num_tp,
+ sum_num_fp, sum_num_fn)
+ return mask_metrics
+
+ def get_estimator_eval_metric_ops(self, eval_dict):
+ """Returns a dictionary of eval metric ops.
+
+ Note that once value_op is called, the detections and groundtruth added via
+ update_op are cleared.
+
+ Args:
+ eval_dict: A dictionary that holds tensors for evaluating object detection
+ performance. For single-image evaluation, this dictionary may be
+ produced from eval_util.result_dict_for_single_example(). For multi-image
+ evaluation, `eval_dict` should contain the fields
+ 'num_gt_masks_per_image' and 'num_det_masks_per_image' to properly unpad
+ the tensors from the batch.
+
+ Returns:
+ a dictionary of metric names to tuple of value_op and update_op that can
+ be used as eval metric ops in tf.estimator.EstimatorSpec. Note that all
+ update ops must be run together and similarly all value ops must be run
+ together to guarantee correct behaviour.
+ """
+
+ def update_op(image_id_batched, groundtruth_classes_batched,
+ groundtruth_instance_masks_batched,
+ groundtruth_is_crowd_batched, num_gt_masks_per_image,
+ detection_classes_batched, detection_masks_batched,
+ num_det_masks_per_image):
+ """Update op for metrics."""
+ for (image_id, groundtruth_classes, groundtruth_instance_masks,
+ groundtruth_is_crowd, num_gt_mask, detection_classes,
+ detection_masks, num_det_mask) in zip(
+ image_id_batched, groundtruth_classes_batched,
+ groundtruth_instance_masks_batched, groundtruth_is_crowd_batched,
+ num_gt_masks_per_image, detection_classes_batched,
+ detection_masks_batched, num_det_masks_per_image):
+
+ self.add_single_ground_truth_image_info(
+ image_id, {
+ 'groundtruth_classes':
+ groundtruth_classes[:num_gt_mask],
+ 'groundtruth_instance_masks':
+ groundtruth_instance_masks[:num_gt_mask],
+ 'groundtruth_is_crowd':
+ groundtruth_is_crowd[:num_gt_mask]
+ })
+ self.add_single_detected_image_info(
+ image_id, {
+ 'detection_classes': detection_classes[:num_det_mask],
+ 'detection_masks': detection_masks[:num_det_mask]
+ })
+
+ # Unpack items from the evaluation dictionary.
+ (image_id, groundtruth_classes, groundtruth_instance_masks,
+ groundtruth_is_crowd, num_gt_masks_per_image, detection_classes,
+ detection_masks, num_det_masks_per_image
+ ) = self._unpack_evaluation_dictionary_items(eval_dict)
+
+ update_op = tf.py_func(update_op, [
+ image_id, groundtruth_classes, groundtruth_instance_masks,
+ groundtruth_is_crowd, num_gt_masks_per_image, detection_classes,
+ detection_masks, num_det_masks_per_image
+ ], [])
+
+ metric_names = [
+ 'PanopticQuality@%.2fIOU' % self._iou_threshold,
+ 'SegmentationQuality@%.2fIOU' % self._iou_threshold,
+ 'RecognitionQuality@%.2fIOU' % self._iou_threshold
+ ]
+ if self._include_metrics_per_category:
+ for category_dict in self._categories:
+ metric_names.append('PanopticQuality@%.2fIOU_ByCategory/%s' %
+ (self._iou_threshold, category_dict['name']))
+
+ def first_value_func():
+ self._metrics = self.evaluate()
+ self.clear()
+ return np.float32(self._metrics[metric_names[0]])
+
+ def value_func_factory(metric_name):
+
+ def value_func():
+ return np.float32(self._metrics[metric_name])
+
+ return value_func
+
+ # Ensure that the metrics are only evaluated once.
+ first_value_op = tf.py_func(first_value_func, [], tf.float32)
+ eval_metric_ops = {metric_names[0]: (first_value_op, update_op)}
+ with tf.control_dependencies([first_value_op]):
+ for metric_name in metric_names[1:]:
+ eval_metric_ops[metric_name] = (tf.py_func(
+ value_func_factory(metric_name), [], np.float32), update_op)
+ return eval_metric_ops
+
+ def _evaluate_all_masks(self):
+ """Evaluate all masks and compute sum iou/TP/FP/FN."""
+
+ sum_num_tp = {category['id']: 0 for category in self._categories}
+ sum_num_fp = sum_num_tp.copy()
+ sum_num_fn = sum_num_tp.copy()
+ sum_tp_iou = sum_num_tp.copy()
+
+ for image_id in self._groundtruth_class_labels:
+ # Separate normal and is_crowd groundtruth
+ crowd_gt_indices = self._groundtruth_is_crowd.get(image_id)
+ (normal_gt_masks, normal_gt_classes, crowd_gt_masks,
+ crowd_gt_classes) = self._separate_normal_and_crowd_labels(
+ crowd_gt_indices, self._groundtruth_masks[image_id],
+ self._groundtruth_class_labels[image_id])
+
+ # Mask matching to normal GT.
+ predicted_masks = self._predicted_masks[image_id]
+ predicted_class_labels = self._predicted_class_labels[image_id]
+ (overlaps, pred_matched,
+ gt_matched) = self._match_predictions_to_groundtruths(
+ predicted_masks,
+ predicted_class_labels,
+ normal_gt_masks,
+ normal_gt_classes,
+ self._iou_threshold,
+ is_crowd=False,
+ with_replacement=False)
+
+ # Accumulate true positives.
+ for (class_id, is_matched, overlap) in zip(predicted_class_labels,
+ pred_matched, overlaps):
+ if is_matched:
+ sum_num_tp[class_id] += 1
+ sum_tp_iou[class_id] += overlap
+
+ # Accumulate false negatives.
+ for (class_id, is_matched) in zip(normal_gt_classes, gt_matched):
+ if not is_matched:
+ sum_num_fn[class_id] += 1
+
+ # Match remaining predictions to crowd gt.
+ remained_pred_indices = np.logical_not(pred_matched)
+ remained_pred_masks = predicted_masks[remained_pred_indices, :, :]
+ remained_pred_classes = predicted_class_labels[remained_pred_indices]
+ _, pred_matched, _ = self._match_predictions_to_groundtruths(
+ remained_pred_masks,
+ remained_pred_classes,
+ crowd_gt_masks,
+ crowd_gt_classes,
+ self._ioa_threshold,
+ is_crowd=True,
+ with_replacement=True)
+
+ # Accumulate false positives
+ for (class_id, is_matched) in zip(remained_pred_classes, pred_matched):
+ if not is_matched:
+ sum_num_fp[class_id] += 1
+ return sum_tp_iou, sum_num_tp, sum_num_fp, sum_num_fn
+
+ def _compute_panoptic_metrics(self, sum_tp_iou, sum_num_tp, sum_num_fp,
+ sum_num_fn):
+ """Compute PQ metric for each category and average over all classes.
+
+ Args:
+ sum_tp_iou: dict, summed true positive intersection-over-union (IoU) for
+ each class, keyed by class_id.
+ sum_num_tp: the total number of true positives for each class, keyed by
+ class_id.
+ sum_num_fp: the total number of false positives for each class, keyed by
+ class_id.
+ sum_num_fn: the total number of false negatives for each class, keyed by
+ class_id.
+
+ Returns:
+ mask_metrics: a dictionary containing averaged metrics over all classes,
+ and per-category metrics if required.
+ """
+ mask_metrics = {}
+ sum_pq = 0
+ sum_sq = 0
+ sum_rq = 0
+ num_valid_classes = 0
+ for category in self._categories:
+ class_id = category['id']
+ (panoptic_quality, segmentation_quality,
+ recognition_quality) = self._compute_panoptic_metrics_single_class(
+ sum_tp_iou[class_id], sum_num_tp[class_id], sum_num_fp[class_id],
+ sum_num_fn[class_id])
+ if panoptic_quality is not None:
+ sum_pq += panoptic_quality
+ sum_sq += segmentation_quality
+ sum_rq += recognition_quality
+ num_valid_classes += 1
+ if self._include_metrics_per_category:
+ mask_metrics['PanopticQuality@%.2fIOU_ByCategory/%s' %
+ (self._iou_threshold,
+ category['name'])] = panoptic_quality
+ mask_metrics['PanopticQuality@%.2fIOU' %
+ self._iou_threshold] = sum_pq / num_valid_classes
+ mask_metrics['SegmentationQuality@%.2fIOU' %
+ self._iou_threshold] = sum_sq / num_valid_classes
+ mask_metrics['RecognitionQuality@%.2fIOU' %
+ self._iou_threshold] = sum_rq / num_valid_classes
+ mask_metrics['NumValidClasses'] = num_valid_classes
+ mask_metrics['NumTotalClasses'] = len(self._categories)
+ return mask_metrics
+
+ def _compute_panoptic_metrics_single_class(self, sum_tp_iou, num_tp, num_fp,
+ num_fn):
+ """Compute panoptic metrics: panoptic/segmentation/recognition quality.
+
+ More computation details in https://arxiv.org/pdf/1801.00868.pdf.
+ Args:
+ sum_tp_iou: summed true positive intersection-over-union (IoU) for a
+ specific class.
+ num_tp: the total number of true positives for a specific class.
+ num_fp: the total number of false positives for a specific class.
+ num_fn: the total number of false negatives for a specific class.
+
+ Returns:
+ panoptic_quality: sum_tp_iou / (num_tp + 0.5*num_fp + 0.5*num_fn).
+ segmentation_quality: sum_tp_iou / num_tp.
+ recognition_quality: num_tp / (num_tp + 0.5*num_fp + 0.5*num_fn).
+ """
+ denominator = num_tp + 0.5 * num_fp + 0.5 * num_fn
+ # Calculate metric only if there is at least one GT or one prediction.
+ if denominator > 0:
+ recognition_quality = num_tp / denominator
+ if num_tp > 0:
+ segmentation_quality = sum_tp_iou / num_tp
+ else:
+ # If there is no TP for this category.
+ segmentation_quality = 0
+ panoptic_quality = segmentation_quality * recognition_quality
+ return panoptic_quality, segmentation_quality, recognition_quality
+ else:
+ return None, None, None
+
+ def _separate_normal_and_crowd_labels(self, crowd_gt_indices,
+ groundtruth_masks, groundtruth_classes):
+ """Separate normal and crowd groundtruth class_labels and masks.
+
+ Args:
+ crowd_gt_indices: None or array of shape [num_groundtruths]. If None, all
+ groundtruths are treated as normal ones.
+ groundtruth_masks: array of shape [num_groundtruths, height, width].
+ groundtruth_classes: array of shape [num_groundtruths].
+
+ Returns:
+ normal_gt_masks: array of shape [num_normal_groundtruths, height, width].
+ normal_gt_classes: array of shape [num_normal_groundtruths].
+ crowd_gt_masks: array of shape [num_crowd_groundtruths, height, width].
+ crowd_gt_classes: array of shape [num_crowd_groundtruths].
+ Raises:
+ ValueError: if the shape of groundtruth classes doesn't match groundtruth
+ masks, or if the shape of crowd_gt_indices doesn't match the groundtruth masks.
+ """
+ if groundtruth_masks.shape[0] != groundtruth_classes.shape[0]:
+ raise ValueError(
+ "The number of masks doesn't match the number of labels.")
+ if crowd_gt_indices is None:
+ # All gts are treated as normal
+ crowd_gt_indices = np.zeros(groundtruth_masks.shape[0], dtype=np.bool)
+ else:
+ if groundtruth_masks.shape[0] != crowd_gt_indices.shape[0]:
+ raise ValueError(
+ "The number of masks doesn't match the number of is_crowd labels.")
+ crowd_gt_indices = crowd_gt_indices.astype(np.bool)
+ normal_gt_indices = np.logical_not(crowd_gt_indices)
+ if normal_gt_indices.size:
+ normal_gt_masks = groundtruth_masks[normal_gt_indices, :, :]
+ normal_gt_classes = groundtruth_classes[normal_gt_indices]
+ crowd_gt_masks = groundtruth_masks[crowd_gt_indices, :, :]
+ crowd_gt_classes = groundtruth_classes[crowd_gt_indices]
+ else:
+ # No groundtruths available, groundtruth_masks.shape = (0, h, w)
+ normal_gt_masks = groundtruth_masks
+ normal_gt_classes = groundtruth_classes
+ crowd_gt_masks = groundtruth_masks
+ crowd_gt_classes = groundtruth_classes
+ return normal_gt_masks, normal_gt_classes, crowd_gt_masks, crowd_gt_classes
+
+ def _match_predictions_to_groundtruths(self,
+ predicted_masks,
+ predicted_classes,
+ groundtruth_masks,
+ groundtruth_classes,
+ matching_threshold,
+ is_crowd=False,
+ with_replacement=False):
+ """Match the predicted masks to groundtruths.
+
+ Args:
+ predicted_masks: array of shape [num_predictions, height, width].
+ predicted_classes: array of shape [num_predictions].
+ groundtruth_masks: array of shape [num_groundtruths, height, width].
+ groundtruth_classes: array of shape [num_groundtruths].
+ matching_threshold: if the overlap between a prediction and a groundtruth
+ is larger than this threshold, the prediction is true positive.
+ is_crowd: whether the groundtruths are crowd annotation or not. If True,
+ use intersection over area (IoA) as the overlapping metric; otherwise
+ use intersection over union (IoU).
+ with_replacement: whether a groundtruth can be matched to multiple
+ predictions. For normal groundtruths, only 1-1 matching is allowed;
+ for crowd groundtruths, 1-to-many matching must be allowed.
+
+ Returns:
+ best_overlaps: array of shape [num_predictions]. Values representing the
+ IoU or IoA with the best matched groundtruth.
+ pred_matched: array of shape [num_predictions]. Boolean value representing
+ whether the ith prediction is matched to a groundtruth.
+ gt_matched: array of shape [num_groundtruth]. Boolean value representing
+ whether the ith groundtruth is matched to a prediction.
+ Raises:
+ ValueError: if the shape of groundtruth/predicted masks doesn't match
+ groundtruth/predicted classes.
+ """
+ if groundtruth_masks.shape[0] != groundtruth_classes.shape[0]:
+ raise ValueError(
+ "The number of GT masks doesn't match the number of labels.")
+ if predicted_masks.shape[0] != predicted_classes.shape[0]:
+ raise ValueError(
+ "The number of predicted masks doesn't match the number of labels.")
+ gt_matched = np.zeros(groundtruth_classes.shape, dtype=np.bool)
+ pred_matched = np.zeros(predicted_classes.shape, dtype=np.bool)
+ best_overlaps = np.zeros(predicted_classes.shape)
+ for pid in range(predicted_classes.shape[0]):
+ best_overlap = 0
+ matched_gt_id = -1
+ for gid in range(groundtruth_classes.shape[0]):
+ if predicted_classes[pid] == groundtruth_classes[gid]:
+ if (not with_replacement) and gt_matched[gid]:
+ continue
+ if not is_crowd:
+ overlap = np_mask_ops.iou(predicted_masks[pid:pid + 1],
+ groundtruth_masks[gid:gid + 1])[0, 0]
+ else:
+ overlap = np_mask_ops.ioa(groundtruth_masks[gid:gid + 1],
+ predicted_masks[pid:pid + 1])[0, 0]
+ if overlap >= matching_threshold and overlap > best_overlap:
+ matched_gt_id = gid
+ best_overlap = overlap
+ if matched_gt_id >= 0:
+ gt_matched[matched_gt_id] = True
+ pred_matched[pid] = True
+ best_overlaps[pid] = best_overlap
+ return best_overlaps, pred_matched, gt_matched
+
+ def _unpack_evaluation_dictionary_items(self, eval_dict):
+ """Unpack items from the evaluation dictionary."""
+ input_data_fields = standard_fields.InputDataFields
+ detection_fields = standard_fields.DetectionResultFields
+ image_id = eval_dict[input_data_fields.key]
+ groundtruth_classes = eval_dict[input_data_fields.groundtruth_classes]
+ groundtruth_instance_masks = eval_dict[
+ input_data_fields.groundtruth_instance_masks]
+ groundtruth_is_crowd = eval_dict.get(input_data_fields.groundtruth_is_crowd,
+ None)
+ num_gt_masks_per_image = eval_dict.get(
+ input_data_fields.num_groundtruth_boxes, None)
+ detection_classes = eval_dict[detection_fields.detection_classes]
+ detection_masks = eval_dict[detection_fields.detection_masks]
+ num_det_masks_per_image = eval_dict.get(detection_fields.num_detections,
+ None)
+ if groundtruth_is_crowd is None:
+ groundtruth_is_crowd = tf.zeros_like(groundtruth_classes, dtype=tf.bool)
+
+ if not image_id.shape.as_list():
+ # Apply a batch dimension to all tensors.
+ image_id = tf.expand_dims(image_id, 0)
+ groundtruth_classes = tf.expand_dims(groundtruth_classes, 0)
+ groundtruth_instance_masks = tf.expand_dims(groundtruth_instance_masks, 0)
+ groundtruth_is_crowd = tf.expand_dims(groundtruth_is_crowd, 0)
+ detection_classes = tf.expand_dims(detection_classes, 0)
+ detection_masks = tf.expand_dims(detection_masks, 0)
+
+ if num_gt_masks_per_image is None:
+ num_gt_masks_per_image = tf.shape(groundtruth_classes)[1:2]
+ else:
+ num_gt_masks_per_image = tf.expand_dims(num_gt_masks_per_image, 0)
+
+ if num_det_masks_per_image is None:
+ num_det_masks_per_image = tf.shape(detection_classes)[1:2]
+ else:
+ num_det_masks_per_image = tf.expand_dims(num_det_masks_per_image, 0)
+ else:
+ if num_gt_masks_per_image is None:
+ num_gt_masks_per_image = tf.tile(
+ tf.shape(groundtruth_classes)[1:2],
+ multiples=tf.shape(groundtruth_classes)[0:1])
+ if num_det_masks_per_image is None:
+ num_det_masks_per_image = tf.tile(
+ tf.shape(detection_classes)[1:2],
+ multiples=tf.shape(detection_classes)[0:1])
+ return (image_id, groundtruth_classes, groundtruth_instance_masks,
+ groundtruth_is_crowd, num_gt_masks_per_image, detection_classes,
+ detection_masks, num_det_masks_per_image)
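Note: the per-class arithmetic in `_compute_panoptic_metrics_single_class` above follows the panoptic quality definition PQ = SQ * RQ from the paper linked in the class docstring. The following is a minimal standalone sketch of the same computation, added here only for illustration and not part of the patch:

    def panoptic_metrics_single_class(sum_tp_iou, num_tp, num_fp, num_fn):
      """Returns (PQ, SQ, RQ) for one class, or (None, None, None) if undefined."""
      denominator = num_tp + 0.5 * num_fp + 0.5 * num_fn
      if denominator == 0:
        # No normal groundtruth and no prediction for this class.
        return None, None, None
      recognition_quality = num_tp / denominator
      segmentation_quality = sum_tp_iou / num_tp if num_tp > 0 else 0.0
      return (segmentation_quality * recognition_quality,
              segmentation_quality, recognition_quality)

    # One TP with IoU 0.8, two FPs and one FN -> PQ = 0.8 / 2.5 = 0.32.
    print(panoptic_metrics_single_class(0.8, 1, 2, 1))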
diff --git a/research/object_detection/metrics/coco_evaluation_test.py b/research/object_detection/metrics/coco_evaluation_test.py
index aed6047f8c5dce427e5381398ad742c996fadc41..165c94780d93bb93bab9ab1187c7fa41b79b96b9 100644
--- a/research/object_detection/metrics/coco_evaluation_test.py
+++ b/research/object_detection/metrics/coco_evaluation_test.py
@@ -18,10 +18,12 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.core import standard_fields
from object_detection.metrics import coco_evaluation
+from object_detection.utils import tf_version
def _get_categories_list():
@@ -250,6 +252,7 @@ class CocoDetectionEvaluationTest(tf.test.TestCase):
})
+@unittest.skipIf(tf_version.is_tf2(), 'Only Supported in TF1.X')
class CocoEvaluationPyFuncTest(tf.test.TestCase):
def testGetOneMAPWithMatchingGroundtruthAndDetections(self):
@@ -926,6 +929,7 @@ class CocoKeypointEvaluationTest(tf.test.TestCase):
-1.0)
+@unittest.skipIf(tf_version.is_tf2(), 'Only Supported in TF1.X')
class CocoKeypointEvaluationPyFuncTest(tf.test.TestCase):
def testGetOneMAPWithMatchingKeypoints(self):
@@ -1438,6 +1442,7 @@ class CocoMaskEvaluationTest(tf.test.TestCase):
self.assertFalse(coco_evaluator._detection_masks_list)
+@unittest.skipIf(tf_version.is_tf2(), 'Only Supported in TF1.X')
class CocoMaskEvaluationPyFuncTest(tf.test.TestCase):
def testAddEvalDict(self):
@@ -1716,5 +1721,221 @@ class CocoMaskEvaluationPyFuncTest(tf.test.TestCase):
self.assertFalse(coco_evaluator._detection_masks_list)
+def _get_panoptic_test_data():
+ # image1 contains 3 people in gt (2 normal annotations and 1 "is_crowd"
+ # annotation), and 3 people in the prediction.
+ gt_masks1 = np.zeros((3, 50, 50), dtype=np.uint8)
+ result_masks1 = np.zeros((3, 50, 50), dtype=np.uint8)
+ gt_masks1[0, 10:20, 20:30] = 1
+ result_masks1[0, 10:18, 20:30] = 1
+ gt_masks1[1, 25:30, 25:35] = 1
+ result_masks1[1, 18:25, 25:30] = 1
+ gt_masks1[2, 40:50, 40:50] = 1
+ result_masks1[2, 47:50, 47:50] = 1
+ gt_class1 = np.array([1, 1, 1])
+ gt_is_crowd1 = np.array([0, 0, 1])
+ result_class1 = np.array([1, 1, 1])
+
+ # image2 contains 1 dog and 1 cat in gt, while the prediction contains
+ # 1 person and 1 dog.
+ gt_masks2 = np.zeros((2, 30, 40), dtype=np.uint8)
+ result_masks2 = np.zeros((2, 30, 40), dtype=np.uint8)
+ gt_masks2[0, 5:15, 20:35] = 1
+ gt_masks2[1, 20:30, 0:10] = 1
+ result_masks2[0, 20:25, 10:15] = 1
+ result_masks2[1, 6:15, 15:35] = 1
+ gt_class2 = np.array([2, 3])
+ gt_is_crowd2 = np.array([0, 0])
+ result_class2 = np.array([1, 2])
+
+ gt_class = [gt_class1, gt_class2]
+ gt_masks = [gt_masks1, gt_masks2]
+ gt_is_crowd = [gt_is_crowd1, gt_is_crowd2]
+ result_class = [result_class1, result_class2]
+ result_masks = [result_masks1, result_masks2]
+ return gt_class, gt_masks, gt_is_crowd, result_class, result_masks
+
+
+class CocoPanopticEvaluationTest(tf.test.TestCase):
+
+ def test_panoptic_quality(self):
+ pq_evaluator = coco_evaluation.CocoPanopticSegmentationEvaluator(
+ _get_categories_list(), include_metrics_per_category=True)
+ (gt_class, gt_masks, gt_is_crowd, result_class,
+ result_masks) = _get_panoptic_test_data()
+
+ for i in range(2):
+ pq_evaluator.add_single_ground_truth_image_info(
+ image_id='image%d' % i,
+ groundtruth_dict={
+ standard_fields.InputDataFields.groundtruth_classes:
+ gt_class[i],
+ standard_fields.InputDataFields.groundtruth_instance_masks:
+ gt_masks[i],
+ standard_fields.InputDataFields.groundtruth_is_crowd:
+ gt_is_crowd[i]
+ })
+
+ pq_evaluator.add_single_detected_image_info(
+ image_id='image%d' % i,
+ detections_dict={
+ standard_fields.DetectionResultFields.detection_classes:
+ result_class[i],
+ standard_fields.DetectionResultFields.detection_masks:
+ result_masks[i]
+ })
+
+ metrics = pq_evaluator.evaluate()
+ self.assertAlmostEqual(metrics['PanopticQuality@0.50IOU_ByCategory/person'],
+ 0.32)
+ self.assertAlmostEqual(metrics['PanopticQuality@0.50IOU_ByCategory/dog'],
+ 135.0 / 195)
+ self.assertAlmostEqual(metrics['PanopticQuality@0.50IOU_ByCategory/cat'], 0)
+ self.assertAlmostEqual(metrics['SegmentationQuality@0.50IOU'],
+ (0.8 + 135.0 / 195) / 3)
+ self.assertAlmostEqual(metrics['RecognitionQuality@0.50IOU'], (0.4 + 1) / 3)
+ self.assertAlmostEqual(metrics['PanopticQuality@0.50IOU'],
+ (0.32 + 135.0 / 195) / 3)
+ self.assertEqual(metrics['NumValidClasses'], 3)
+ self.assertEqual(metrics['NumTotalClasses'], 3)
+
+
+@unittest.skipIf(tf_version.is_tf2(), 'Only Supported in TF1.X')
+class CocoPanopticEvaluationPyFuncTest(tf.test.TestCase):
+
+ def testPanopticQualityNoBatch(self):
+ pq_evaluator = coco_evaluation.CocoPanopticSegmentationEvaluator(
+ _get_categories_list(), include_metrics_per_category=True)
+
+ image_id = tf.placeholder(tf.string, shape=())
+ groundtruth_classes = tf.placeholder(tf.int32, shape=(None))
+ groundtruth_masks = tf.placeholder(tf.uint8, shape=(None, None, None))
+ groundtruth_is_crowd = tf.placeholder(tf.int32, shape=(None))
+ detection_classes = tf.placeholder(tf.int32, shape=(None))
+ detection_masks = tf.placeholder(tf.uint8, shape=(None, None, None))
+
+ input_data_fields = standard_fields.InputDataFields
+ detection_fields = standard_fields.DetectionResultFields
+ eval_dict = {
+ input_data_fields.key: image_id,
+ input_data_fields.groundtruth_classes: groundtruth_classes,
+ input_data_fields.groundtruth_instance_masks: groundtruth_masks,
+ input_data_fields.groundtruth_is_crowd: groundtruth_is_crowd,
+ detection_fields.detection_classes: detection_classes,
+ detection_fields.detection_masks: detection_masks,
+ }
+
+ eval_metric_ops = pq_evaluator.get_estimator_eval_metric_ops(eval_dict)
+
+ _, update_op = eval_metric_ops['PanopticQuality@0.50IOU']
+ (gt_class, gt_masks, gt_is_crowd, result_class,
+ result_masks) = _get_panoptic_test_data()
+
+ with self.test_session() as sess:
+ for i in range(2):
+ sess.run(
+ update_op,
+ feed_dict={
+ image_id: 'image%d' % i,
+ groundtruth_classes: gt_class[i],
+ groundtruth_masks: gt_masks[i],
+ groundtruth_is_crowd: gt_is_crowd[i],
+ detection_classes: result_class[i],
+ detection_masks: result_masks[i]
+ })
+ metrics = {}
+ for key, (value_op, _) in eval_metric_ops.items():
+ metrics[key] = value_op
+ metrics = sess.run(metrics)
+ self.assertAlmostEqual(metrics['PanopticQuality@0.50IOU'],
+ (0.32 + 135.0 / 195) / 3)
+
+ def testPanopticQualityBatched(self):
+ pq_evaluator = coco_evaluation.CocoPanopticSegmentationEvaluator(
+ _get_categories_list(), include_metrics_per_category=True)
+ batch_size = 2
+ image_id = tf.placeholder(tf.string, shape=(batch_size))
+ groundtruth_classes = tf.placeholder(tf.int32, shape=(batch_size, None))
+ groundtruth_masks = tf.placeholder(
+ tf.uint8, shape=(batch_size, None, None, None))
+ groundtruth_is_crowd = tf.placeholder(tf.int32, shape=(batch_size, None))
+ detection_classes = tf.placeholder(tf.int32, shape=(batch_size, None))
+ detection_masks = tf.placeholder(
+ tf.uint8, shape=(batch_size, None, None, None))
+ num_gt_masks_per_image = tf.placeholder(tf.int32, shape=(batch_size))
+ num_det_masks_per_image = tf.placeholder(tf.int32, shape=(batch_size))
+
+ input_data_fields = standard_fields.InputDataFields
+ detection_fields = standard_fields.DetectionResultFields
+ eval_dict = {
+ input_data_fields.key: image_id,
+ input_data_fields.groundtruth_classes: groundtruth_classes,
+ input_data_fields.groundtruth_instance_masks: groundtruth_masks,
+ input_data_fields.groundtruth_is_crowd: groundtruth_is_crowd,
+ input_data_fields.num_groundtruth_boxes: num_gt_masks_per_image,
+ detection_fields.detection_classes: detection_classes,
+ detection_fields.detection_masks: detection_masks,
+ detection_fields.num_detections: num_det_masks_per_image,
+ }
+
+ eval_metric_ops = pq_evaluator.get_estimator_eval_metric_ops(eval_dict)
+
+ _, update_op = eval_metric_ops['PanopticQuality@0.50IOU']
+ (gt_class, gt_masks, gt_is_crowd, result_class,
+ result_masks) = _get_panoptic_test_data()
+ with self.test_session() as sess:
+ sess.run(
+ update_op,
+ feed_dict={
+ image_id: ['image0', 'image1'],
+ groundtruth_classes:
+ np.stack([
+ gt_class[0],
+ np.pad(gt_class[1], (0, 1), mode='constant')
+ ],
+ axis=0),
+ groundtruth_masks:
+ np.stack([
+ np.pad(
+ gt_masks[0], ((0, 0), (0, 10), (0, 10)),
+ mode='constant'),
+ np.pad(
+ gt_masks[1], ((0, 1), (0, 30), (0, 20)),
+ mode='constant'),
+ ],
+ axis=0),
+ groundtruth_is_crowd:
+ np.stack([
+ gt_is_crowd[0],
+ np.pad(gt_is_crowd[1], (0, 1), mode='constant')
+ ],
+ axis=0),
+ num_gt_masks_per_image: np.array([3, 2]),
+ detection_classes:
+ np.stack([
+ result_class[0],
+ np.pad(result_class[1], (0, 1), mode='constant')
+ ],
+ axis=0),
+ detection_masks:
+ np.stack([
+ np.pad(
+ result_masks[0], ((0, 0), (0, 10), (0, 10)),
+ mode='constant'),
+ np.pad(
+ result_masks[1], ((0, 1), (0, 30), (0, 20)),
+ mode='constant'),
+ ],
+ axis=0),
+ num_det_masks_per_image: np.array([3, 2]),
+ })
+ metrics = {}
+ for key, (value_op, _) in eval_metric_ops.items():
+ metrics[key] = value_op
+ metrics = sess.run(metrics)
+ self.assertAlmostEqual(metrics['PanopticQuality@0.50IOU'],
+ (0.32 + 135.0 / 195) / 3)
+
+
if __name__ == '__main__':
tf.test.main()
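Note: the values asserted in the panoptic tests above can be reproduced by hand from `_get_panoptic_test_data`. The person class accumulates one true positive with IoU 0.8 (image1), one false negative (the second person groundtruth is never matched) and two false positives (the leftover person prediction in image1 and the person prediction in image2; the third person prediction is absorbed by the is_crowd groundtruth via IoA). The dog class gets a single true positive with IoU 135/195, and the cat class a single false negative. A quick check of the asserted mean, for illustration only:

    person_pq = 0.8 / (1 + 0.5 * 2 + 0.5 * 1)        # 0.32
    dog_pq = (135.0 / 195) / (1 + 0.5 * 0 + 0.5 * 0)  # ~0.6923
    cat_pq = 0.0                                      # no true positives
    print((person_pq + dog_pq + cat_pq) / 3)          # matches PanopticQuality@0.50IOU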
diff --git a/research/object_detection/metrics/coco_tools.py b/research/object_detection/metrics/coco_tools.py
index f2379f6537997a591d4ea661cce831b67dfc8d0f..790d5bdef23bef149e8eb1afa9cdecb9ce458e6e 100644
--- a/research/object_detection/metrics/coco_tools.py
+++ b/research/object_detection/metrics/coco_tools.py
@@ -52,6 +52,7 @@ from pycocotools import coco
from pycocotools import cocoeval
from pycocotools import mask
+import six
from six.moves import range
from six.moves import zip
import tensorflow.compat.v1 as tf
@@ -353,7 +354,9 @@ def _RleCompress(masks):
Returns:
A pycocotools Run-length encoding of the mask.
"""
- return mask.encode(np.asfortranarray(masks))
+ rle = mask.encode(np.asfortranarray(masks))
+ rle['counts'] = six.ensure_str(rle['counts'])
+ return rle
def ExportSingleImageGroundtruthToCoco(image_id,
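Note: the `six.ensure_str` call added to `_RleCompress` above addresses the fact that, under Python 3, pycocotools returns the run-length `counts` field as `bytes`, which can break downstream consumers that expect a string (for example `json.dumps`). A small illustration of the idea, assuming numpy and pycocotools are installed; this snippet is not part of the patch:

    import numpy as np
    import six
    from pycocotools import mask

    binary_mask = np.zeros((4, 4), dtype=np.uint8)
    binary_mask[1:3, 1:3] = 1
    rle = mask.encode(np.asfortranarray(binary_mask))
    # Under Python 3, rle['counts'] is bytes; converting it to str keeps later
    # JSON serialization of the exported annotations from failing.
    rle['counts'] = six.ensure_str(rle['counts'])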
diff --git a/research/object_detection/metrics/offline_eval_map_corloc.py b/research/object_detection/metrics/offline_eval_map_corloc.py
index 69ecaeaaed30ad0b330793a22ca730c6e923b4b8..a12b1d98493e022d302c76b0cadb514e7fc0eb60 100644
--- a/research/object_detection/metrics/offline_eval_map_corloc.py
+++ b/research/object_detection/metrics/offline_eval_map_corloc.py
@@ -36,8 +36,8 @@ import os
import re
import tensorflow.compat.v1 as tf
+from object_detection import eval_util
from object_detection.core import standard_fields
-from object_detection.legacy import evaluator
from object_detection.metrics import tf_example_parser
from object_detection.utils import config_util
from object_detection.utils import label_map_util
@@ -94,7 +94,7 @@ def read_data_and_evaluate(input_config, eval_config):
categories = label_map_util.create_categories_from_labelmap(
input_config.label_map_path)
- object_detection_evaluators = evaluator.get_evaluators(
+ object_detection_evaluators = eval_util.get_evaluators(
eval_config, categories)
# Support a single evaluator
object_detection_evaluator = object_detection_evaluators[0]
diff --git a/research/object_detection/model_lib.py b/research/object_detection/model_lib.py
index 5791251512071440f455744af6b280753cc253a1..365ea1c0261aa0b23c11460d2f5127632ed7b482 100644
--- a/research/object_detection/model_lib.py
+++ b/research/object_detection/model_lib.py
@@ -23,9 +23,9 @@ import functools
import os
import tensorflow.compat.v1 as tf
+import tensorflow.compat.v2 as tf2
import tf_slim as slim
-
from object_detection import eval_util
from object_detection import exporter as exporter_lib
from object_detection import inputs
@@ -349,7 +349,7 @@ def create_model_fn(detection_model_fn, configs, hparams, use_tpu=False,
from tensorflow.python.keras.engine import base_layer_utils # pylint: disable=g-import-not-at-top
# Enable v2 behavior, as `mixed_bfloat16` is only supported in TF 2.0.
base_layer_utils.enable_v2_dtype_behavior()
- tf.compat.v2.keras.mixed_precision.experimental.set_policy(
+ tf2.keras.mixed_precision.experimental.set_policy(
'mixed_bfloat16')
detection_model = detection_model_fn(
is_training=is_training, add_summaries=(not use_tpu))
diff --git a/research/object_detection/model_lib_test.py b/research/object_detection/model_lib_tf1_test.py
similarity index 98%
rename from research/object_detection/model_lib_test.py
rename to research/object_detection/model_lib_tf1_test.py
index ae14ad844eece3cf893d391a2abf2ff597ed650f..7d4d81b2cb43e0faa3d84f48df91c27d0da217bc 100644
--- a/research/object_detection/model_lib_test.py
+++ b/research/object_detection/model_lib_tf1_test.py
@@ -20,19 +20,17 @@ from __future__ import print_function
import functools
import os
-
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
-from tensorflow.contrib.tpu.python.tpu import tpu_config
-from tensorflow.contrib.tpu.python.tpu import tpu_estimator
-
from object_detection import inputs
from object_detection import model_hparams
from object_detection import model_lib
from object_detection.builders import model_builder
from object_detection.core import standard_fields as fields
from object_detection.utils import config_util
+from object_detection.utils import tf_version
# Model for test. Options are:
@@ -122,6 +120,7 @@ def _make_initializable_iterator(dataset):
return iterator
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class ModelLibTest(tf.test.TestCase):
@classmethod
@@ -337,8 +336,7 @@ class ModelLibTest(tf.test.TestCase):
def test_create_tpu_estimator_and_inputs(self):
"""Tests that number of train/eval defaults to config values."""
-
- run_config = tpu_config.RunConfig()
+ run_config = tf.estimator.tpu.RunConfig()
hparams = model_hparams.create_hparams(
hparams_overrides='load_pretrained=false')
pipeline_config_path = get_pipeline_config_path(MODEL_NAME_FOR_TEST)
@@ -352,7 +350,7 @@ class ModelLibTest(tf.test.TestCase):
estimator = train_and_eval_dict['estimator']
train_steps = train_and_eval_dict['train_steps']
- self.assertIsInstance(estimator, tpu_estimator.TPUEstimator)
+ self.assertIsInstance(estimator, tf.estimator.tpu.TPUEstimator)
self.assertEqual(20, train_steps)
def test_create_train_and_eval_specs(self):
@@ -406,6 +404,7 @@ class ModelLibTest(tf.test.TestCase):
self.assertEqual(None, experiment.eval_steps)
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class UnbatchTensorsTest(tf.test.TestCase):
def test_unbatch_without_unpadding(self):
diff --git a/research/object_detection/model_lib_v2_test.py b/research/object_detection/model_lib_tf2_test.py
similarity index 80%
rename from research/object_detection/model_lib_v2_test.py
rename to research/object_detection/model_lib_tf2_test.py
index d2eff82f9d698cc1839b8983e1006707cbbcf921..f65273660195752227b2bcc90dceb04184a6eb62 100644
--- a/research/object_detection/model_lib_v2_test.py
+++ b/research/object_detection/model_lib_tf2_test.py
@@ -20,18 +20,19 @@ from __future__ import print_function
import os
import tempfile
-
+import unittest
import numpy as np
import six
import tensorflow.compat.v1 as tf
+import tensorflow.compat.v2 as tf2
from object_detection import inputs
-from object_detection import model_hparams
from object_detection import model_lib_v2
from object_detection.builders import model_builder
from object_detection.core import model
from object_detection.protos import train_pb2
from object_detection.utils import config_util
+from object_detection.utils import tf_version
if six.PY2:
import mock # pylint: disable=g-importing-member,g-import-not-at-top
@@ -72,6 +73,7 @@ def _get_config_kwarg_overrides():
}
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class ModelLibTest(tf.test.TestCase):
@classmethod
@@ -80,24 +82,25 @@ class ModelLibTest(tf.test.TestCase):
def test_train_loop_then_eval_loop(self):
"""Tests that Estimator and input function are constructed correctly."""
- hparams = model_hparams.create_hparams(
- hparams_overrides='load_pretrained=false')
+ model_dir = tf.test.get_temp_dir()
pipeline_config_path = get_pipeline_config_path(MODEL_NAME_FOR_TEST)
+ new_pipeline_config_path = os.path.join(model_dir, 'new_pipeline.config')
+ config_util.clear_fine_tune_checkpoint(pipeline_config_path,
+ new_pipeline_config_path)
config_kwarg_overrides = _get_config_kwarg_overrides()
- model_dir = tf.test.get_temp_dir()
train_steps = 2
- model_lib_v2.train_loop(
- hparams,
- pipeline_config_path,
- model_dir=model_dir,
- train_steps=train_steps,
- checkpoint_every_n=1,
- **config_kwarg_overrides)
+ strategy = tf2.distribute.OneDeviceStrategy(device='/cpu:0')
+ with strategy.scope():
+ model_lib_v2.train_loop(
+ new_pipeline_config_path,
+ model_dir=model_dir,
+ train_steps=train_steps,
+ checkpoint_every_n=1,
+ **config_kwarg_overrides)
model_lib_v2.eval_continuously(
- hparams,
- pipeline_config_path,
+ new_pipeline_config_path,
model_dir=model_dir,
checkpoint_dir=model_dir,
train_steps=train_steps,
@@ -120,6 +123,9 @@ class SimpleModel(model.DetectionModel):
return []
def restore_map(self, *args, **kwargs):
+ pass
+
+ def restore_from_objects(self, fine_tune_checkpoint_type):
return {'model': self}
def preprocess(self, _):
@@ -139,27 +145,31 @@ class SimpleModel(model.DetectionModel):
return []
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class ModelCheckpointTest(tf.test.TestCase):
"""Test for model checkpoint related functionality."""
def test_checkpoint_max_to_keep(self):
"""Test that only the most recent checkpoints are kept."""
+ strategy = tf2.distribute.OneDeviceStrategy(device='/cpu:0')
with mock.patch.object(
model_builder, 'build', autospec=True) as mock_builder:
- mock_builder.return_value = SimpleModel()
-
- hparams = model_hparams.create_hparams(
- hparams_overrides='load_pretrained=false')
+ with strategy.scope():
+ mock_builder.return_value = SimpleModel()
+ model_dir = tempfile.mkdtemp(dir=self.get_temp_dir())
pipeline_config_path = get_pipeline_config_path(MODEL_NAME_FOR_TEST)
+ new_pipeline_config_path = os.path.join(model_dir, 'new_pipeline.config')
+ config_util.clear_fine_tune_checkpoint(pipeline_config_path,
+ new_pipeline_config_path)
config_kwarg_overrides = _get_config_kwarg_overrides()
- model_dir = tempfile.mkdtemp(dir=self.get_temp_dir())
- model_lib_v2.train_loop(
- hparams, pipeline_config_path, model_dir=model_dir,
- train_steps=20, checkpoint_every_n=2, checkpoint_max_to_keep=3,
- **config_kwarg_overrides
- )
+ with strategy.scope():
+ model_lib_v2.train_loop(
+ new_pipeline_config_path, model_dir=model_dir,
+ train_steps=20, checkpoint_every_n=2, checkpoint_max_to_keep=3,
+ **config_kwarg_overrides
+ )
ckpt_files = tf.io.gfile.glob(os.path.join(model_dir, 'ckpt-*.index'))
self.assertEqual(len(ckpt_files), 3,
'{} not of length 3.'.format(ckpt_files))
@@ -167,10 +177,11 @@ class ModelCheckpointTest(tf.test.TestCase):
class IncompatibleModel(SimpleModel):
- def restore_map(self, *args, **kwargs):
+ def restore_from_objects(self, *args, **kwargs):
return {'weight': self.weight}
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class CheckpointV2Test(tf.test.TestCase):
def setUp(self):
@@ -199,7 +210,6 @@ class CheckpointV2Test(tf.test.TestCase):
model_lib_v2.load_fine_tune_checkpoint(
self._model, self._ckpt_path, checkpoint_type='',
checkpoint_version=train_pb2.CheckpointVersion.V2,
- load_all_detection_checkpoint_vars=True,
input_dataset=self._train_input_fn(),
unpad_groundtruth_tensors=True)
np.testing.assert_allclose(self._model.weight.numpy(), 42)
@@ -212,8 +222,9 @@ class CheckpointV2Test(tf.test.TestCase):
model_lib_v2.load_fine_tune_checkpoint(
IncompatibleModel(), self._ckpt_path, checkpoint_type='',
checkpoint_version=train_pb2.CheckpointVersion.V2,
- load_all_detection_checkpoint_vars=True,
input_dataset=self._train_input_fn(),
unpad_groundtruth_tensors=True)
+if __name__ == '__main__':
+ tf.test.main()
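Note: the TF2 tests above now call `train_loop` inside a `tf.distribute` strategy scope. As a tiny, self-contained sketch (independent of the object detection code), variables created under the scope are created and placed by the strategy, which is why model construction has to happen inside it:

    import tensorflow.compat.v2 as tf

    strategy = tf.distribute.OneDeviceStrategy(device='/cpu:0')
    with strategy.scope():
      # Any variables created here (e.g. model weights) are created through the
      # strategy, mirroring how train_loop builds the detection model.
      weight = tf.Variable(1.0)
    print(weight.device)  # pinned to the strategy's device, .../CPU:0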
diff --git a/research/object_detection/model_lib_v2.py b/research/object_detection/model_lib_v2.py
index ab1fbdc1e13095834c99473b88ff718f77cb245a..8d266388d59cc9a254a26bc2ee6b5dbb70f30f87 100644
--- a/research/object_detection/model_lib_v2.py
+++ b/research/object_detection/model_lib_v2.py
@@ -34,7 +34,6 @@ from object_detection.protos import train_pb2
from object_detection.utils import config_util
from object_detection.utils import label_map_util
from object_detection.utils import ops
-from object_detection.utils import variables_helper
from object_detection.utils import visualization_utils as vutils
# pylint: disable=g-import-not-at-top
@@ -47,13 +46,6 @@ except ImportError:
MODEL_BUILD_UTIL_MAP = model_lib.MODEL_BUILD_UTIL_MAP
-### NOTE: This file is a wip.
-### TODO(kaftan): Explore adding unit tests for individual methods
-### TODO(kaftan): Add unit test that checks training on a single image w/
-#### groundtruth, and verfiy that loss goes to zero.
-#### Possibly have version that takes it as the whole train & eval dataset,
-#### & verify the loss output from the eval_loop method.
-### TODO(kaftan): Make sure the unit tests run in TAP presubmits or Kokoro
RESTORE_MAP_ERROR_TEMPLATE = (
'Since we are restoring a v2 style checkpoint'
@@ -277,14 +269,21 @@ def validate_tf_v2_checkpoint_restore_map(checkpoint_restore_map):
"""
for key, value in checkpoint_restore_map.items():
- if not (isinstance(key, str) and isinstance(value, tf.Module)):
+ if not (isinstance(key, str) and
+ (isinstance(value, tf.Module)
+ or isinstance(value, tf.train.Checkpoint))):
raise TypeError(RESTORE_MAP_ERROR_TEMPLATE.format(
key.__class__.__name__, value.__class__.__name__))
+def is_object_based_checkpoint(checkpoint_path):
+ """Returns true if `checkpoint_path` points to an object-based checkpoint."""
+ var_names = [var[0] for var in tf.train.list_variables(checkpoint_path)]
+ return '_CHECKPOINTABLE_OBJECT_GRAPH' in var_names
+
+
def load_fine_tune_checkpoint(
- model, checkpoint_path, checkpoint_type, checkpoint_version,
- load_all_detection_checkpoint_vars, input_dataset,
+ model, checkpoint_path, checkpoint_type, checkpoint_version, input_dataset,
unpad_groundtruth_tensors):
"""Load a fine tuning classification or detection checkpoint.
@@ -292,8 +291,7 @@ def load_fine_tune_checkpoint(
the model by computing a dummy loss. (Models might not have built their
variables before their first execution)
- It then loads a variable-name based classification or detection checkpoint
- that comes from converted TF 1.x slim model checkpoints.
+ It then loads an object-based classification or detection checkpoint.
This method updates the model in-place and does not return a value.
@@ -306,14 +304,22 @@ def load_fine_tune_checkpoint(
classification checkpoint for initialization prior to training.
Valid values: `detection`, `classification`.
checkpoint_version: train_pb2.CheckpointVersion.V1 or V2 enum indicating
- whether to load checkpoints in V1 style or V2 style.
- load_all_detection_checkpoint_vars: whether to load all variables (when
- `fine_tune_checkpoint_type` is `detection`). If False, only variables
- within the feature extractor scopes are included. Default False.
+ whether to load checkpoints in V1 style or V2 style. In this binary
+ we only support V2 style (object-based) checkpoints.
input_dataset: The tf.data Dataset the model is being trained on. Needed
to get the shapes for the dummy loss computation.
unpad_groundtruth_tensors: A parameter passed to unstack_batch.
+
+ Raises:
+ IOError: if `checkpoint_path` does not point at a valid object-based
+ checkpoint
+ ValueError: if `checkpoint_version` is not train_pb2.CheckpointVersion.V2
"""
+ if not is_object_based_checkpoint(checkpoint_path):
+ raise IOError('Checkpoint is expected to be an object-based checkpoint.')
+ if checkpoint_version == train_pb2.CheckpointVersion.V1:
+ raise ValueError('Checkpoint version should be V2')
+
features, labels = iter(input_dataset).next()
@tf.function
@@ -330,35 +336,20 @@ def load_fine_tune_checkpoint(
labels)
strategy = tf.compat.v2.distribute.get_strategy()
- strategy.run(
+ strategy.experimental_run_v2(
_dummy_computation_fn, args=(
features,
labels,
))
- if checkpoint_version == train_pb2.CheckpointVersion.V1:
- var_map = model.restore_map(
- fine_tune_checkpoint_type=checkpoint_type,
- load_all_detection_checkpoint_vars=(
- load_all_detection_checkpoint_vars))
- available_var_map = variables_helper.get_variables_available_in_checkpoint(
- var_map,
- checkpoint_path,
- include_global_step=False)
- tf.train.init_from_checkpoint(checkpoint_path,
- available_var_map)
- elif checkpoint_version == train_pb2.CheckpointVersion.V2:
- restore_map = model.restore_map(
- fine_tune_checkpoint_type=checkpoint_type,
- load_all_detection_checkpoint_vars=(
- load_all_detection_checkpoint_vars))
- validate_tf_v2_checkpoint_restore_map(restore_map)
-
- ckpt = tf.train.Checkpoint(**restore_map)
- ckpt.restore(checkpoint_path).assert_existing_objects_matched()
-
-
-def _get_filepath(strategy, filepath):
+ restore_from_objects_dict = model.restore_from_objects(
+ fine_tune_checkpoint_type=checkpoint_type)
+ validate_tf_v2_checkpoint_restore_map(restore_from_objects_dict)
+ ckpt = tf.train.Checkpoint(**restore_from_objects_dict)
+ ckpt.restore(checkpoint_path).assert_existing_objects_matched()
+
+
+def get_filepath(strategy, filepath):
"""Get appropriate filepath for worker.
Args:
@@ -377,7 +368,7 @@ def _get_filepath(strategy, filepath):
return os.path.join(filepath, 'temp_worker_{:03d}'.format(task_id))
-def _clean_temporary_directories(strategy, filepath):
+def clean_temporary_directories(strategy, filepath):
"""Temporary directory clean up for MultiWorker Mirrored Strategy.
This is needed for all non-chief workers.
@@ -392,14 +383,12 @@ def _clean_temporary_directories(strategy, filepath):
def train_loop(
- hparams,
pipeline_config_path,
model_dir,
config_override=None,
train_steps=None,
use_tpu=False,
save_final_config=False,
- export_to_tpu=None,
checkpoint_every_n=1000,
checkpoint_max_to_keep=7,
**kwargs):
@@ -417,7 +406,6 @@ def train_loop(
8. Logs the training metrics as TensorBoard summaries.
Args:
- hparams: A `HParams`.
pipeline_config_path: A path to a pipeline config file.
model_dir:
The directory to save checkpoints and summaries to.
@@ -428,10 +416,6 @@ def train_loop(
use_tpu: Boolean, whether training and evaluation should run on TPU.
save_final_config: Whether to save final config (obtained after applying
overrides) to `model_dir`.
- export_to_tpu: When use_tpu and export_to_tpu are true,
- `export_savedmodel()` exports a metagraph for serving on TPU besides the
- one on CPU. If export_to_tpu is not provided, we will look for it in
- hparams too.
checkpoint_every_n:
Checkpoint every n training steps.
checkpoint_max_to_keep:
@@ -453,7 +437,7 @@ def train_loop(
'use_bfloat16': configs['train_config'].use_bfloat16 and use_tpu
})
configs = merge_external_params_with_configs(
- configs, hparams, kwargs_dict=kwargs)
+ configs, None, kwargs_dict=kwargs)
model_config = configs['model']
train_config = configs['train_config']
train_input_config = configs['train_input_config']
@@ -468,33 +452,14 @@ def train_loop(
if train_steps is None and train_config.num_steps != 0:
train_steps = train_config.num_steps
- # Read export_to_tpu from hparams if not passed.
- if export_to_tpu is None:
- export_to_tpu = hparams.get('export_to_tpu', False)
- tf.logging.info(
- 'train_loop: use_tpu %s, export_to_tpu %s', use_tpu,
- export_to_tpu)
-
if kwargs['use_bfloat16']:
tf.compat.v2.keras.mixed_precision.experimental.set_policy('mixed_bfloat16')
- # Parse the checkpoint fine tuning configs
- if hparams.load_pretrained:
- fine_tune_checkpoint_path = train_config.fine_tune_checkpoint
- else:
- fine_tune_checkpoint_path = None
- load_all_detection_checkpoint_vars = (
- train_config.load_all_detection_checkpoint_vars)
- # TODO(kaftan) (or anyone else): move this piece of config munging to
- ## utils/config_util.py
- if not train_config.fine_tune_checkpoint_type:
- # train_config.from_detection_checkpoint field is deprecated. For
- # backward compatibility, set train_config.fine_tune_checkpoint_type
- # based on train_config.from_detection_checkpoint.
- if train_config.from_detection_checkpoint:
- train_config.fine_tune_checkpoint_type = 'detection'
- else:
- train_config.fine_tune_checkpoint_type = 'classification'
+ if train_config.load_all_detection_checkpoint_vars:
+ raise ValueError('train_pb2.load_all_detection_checkpoint_vars '
+ 'unsupported in TF2')
+
+ config_util.update_fine_tune_checkpoint_type(train_config)
fine_tune_checkpoint_type = train_config.fine_tune_checkpoint_type
fine_tune_checkpoint_version = train_config.fine_tune_checkpoint_version
@@ -539,8 +504,8 @@ def train_loop(
## Train the model
# Get the appropriate filepath (temporary or not) based on whether the worker
# is the chief.
- summary_writer_filepath = _get_filepath(strategy,
- os.path.join(model_dir, 'train'))
+ summary_writer_filepath = get_filepath(strategy,
+ os.path.join(model_dir, 'train'))
summary_writer = tf.compat.v2.summary.create_file_writer(
summary_writer_filepath)
@@ -556,18 +521,18 @@ def train_loop(
with tf.compat.v2.summary.record_if(
lambda: global_step % num_steps_per_iteration == 0):
# Load a fine-tuning checkpoint.
- if fine_tune_checkpoint_path:
- load_fine_tune_checkpoint(detection_model, fine_tune_checkpoint_path,
+ if train_config.fine_tune_checkpoint:
+ load_fine_tune_checkpoint(detection_model,
+ train_config.fine_tune_checkpoint,
fine_tune_checkpoint_type,
fine_tune_checkpoint_version,
- load_all_detection_checkpoint_vars,
train_input,
unpad_groundtruth_tensors)
ckpt = tf.compat.v2.train.Checkpoint(
step=global_step, model=detection_model, optimizer=optimizer)
- manager_dir = _get_filepath(strategy, model_dir)
+ manager_dir = get_filepath(strategy, model_dir)
if not strategy.extended.should_checkpoint:
checkpoint_max_to_keep = 1
manager = tf.compat.v2.train.CheckpointManager(
@@ -597,7 +562,7 @@ def train_loop(
def _sample_and_train(strategy, train_step_fn, data_iterator):
features, labels = data_iterator.next()
- per_replica_losses = strategy.run(
+ per_replica_losses = strategy.experimental_run_v2(
train_step_fn, args=(features, labels))
# TODO(anjalisridhar): explore if it is safe to remove the
## num_replicas scaling of the loss and switch this to a ReduceOp.Mean
@@ -615,6 +580,10 @@ def train_loop(
return _sample_and_train(strategy, train_step_fn, data_iterator)
train_input_iter = iter(train_input)
+
+ if int(global_step.value()) == 0:
+ manager.save()
+
checkpointed_step = int(global_step.value())
logged_step = global_step.value()
@@ -646,8 +615,8 @@ def train_loop(
# Remove the checkpoint directories of the non-chief workers that
# MultiWorkerMirroredStrategy forces us to save during sync distributed
# training.
- _clean_temporary_directories(strategy, manager_dir)
- _clean_temporary_directories(strategy, summary_writer_filepath)
+ clean_temporary_directories(strategy, manager_dir)
+ clean_temporary_directories(strategy, summary_writer_filepath)
def eager_eval_loop(
@@ -767,28 +736,25 @@ def eager_eval_loop(
return eval_dict, losses_dict, class_agnostic
+ agnostic_categories = label_map_util.create_class_agnostic_category_index()
+ per_class_categories = label_map_util.create_category_index_from_labelmap(
+ eval_input_config.label_map_path)
+ keypoint_edges = [
+ (kp.start, kp.end) for kp in eval_config.keypoint_edge]
+
for i, (features, labels) in enumerate(eval_dataset):
eval_dict, losses_dict, class_agnostic = compute_eval_dict(features, labels)
+ if class_agnostic:
+ category_index = agnostic_categories
+ else:
+ category_index = per_class_categories
+
if i % 100 == 0:
tf.logging.info('Finished eval step %d', i)
use_original_images = fields.InputDataFields.original_image in features
- if not use_tpu and use_original_images:
- # Summary for input images.
- tf.compat.v2.summary.image(
- name='eval_input_images',
- step=global_step,
- data=eval_dict['original_image'],
- max_outputs=1)
- # Summary for prediction/groundtruth side-by-side images.
- if class_agnostic:
- category_index = label_map_util.create_class_agnostic_category_index()
- else:
- category_index = label_map_util.create_category_index_from_labelmap(
- eval_input_config.label_map_path)
- keypoint_edges = [
- (kp.start, kp.end) for kp in eval_config.keypoint_edge]
+ if use_original_images and i < eval_config.num_visualizations:
sbys_image_list = vutils.draw_side_by_side_evaluation_image(
eval_dict,
category_index=category_index,
@@ -798,10 +764,10 @@ def eager_eval_loop(
keypoint_edges=keypoint_edges or None)
sbys_images = tf.concat(sbys_image_list, axis=0)
tf.compat.v2.summary.image(
- name='eval_side_by_side',
+ name='eval_side_by_side_' + str(i),
step=global_step,
data=sbys_images,
- max_outputs=eval_config.num_visualizations)
+ max_outputs=1)
if evaluators is None:
if class_agnostic:
@@ -830,14 +796,15 @@ def eager_eval_loop(
eval_metrics[loss_key] = loss_metrics[loss_key].result()
eval_metrics = {str(k): v for k, v in eval_metrics.items()}
+ tf.logging.info('Eval metrics at step %d', global_step)
for k in eval_metrics:
tf.compat.v2.summary.scalar(k, eval_metrics[k], step=global_step)
+ tf.logging.info('\t+ %s: %f', k, eval_metrics[k])
return eval_metrics
def eval_continuously(
- hparams,
pipeline_config_path,
config_override=None,
train_steps=None,
@@ -846,7 +813,6 @@ def eval_continuously(
use_tpu=False,
override_eval_num_epochs=True,
postprocess_on_cpu=False,
- export_to_tpu=None,
model_dir=None,
checkpoint_dir=None,
wait_interval=180,
@@ -859,7 +825,6 @@ def eval_continuously(
on the evaluation data.
Args:
- hparams: A `HParams`.
pipeline_config_path: A path to a pipeline config file.
config_override: A pipeline_pb2.TrainEvalPipelineConfig text proto to
override the config from `pipeline_config_path`.
@@ -875,10 +840,6 @@ def eval_continuously(
eval_input.
postprocess_on_cpu: When use_tpu and postprocess_on_cpu are true,
postprocess is scheduled on the host cpu.
- export_to_tpu: When use_tpu and export_to_tpu are true,
- `export_savedmodel()` exports a metagraph for serving on TPU besides the
- one on CPU. If export_to_tpu is not provided, we will look for it in
- hparams too.
model_dir: Directory to output resulting evaluation summaries to.
checkpoint_dir: Directory that contains the training checkpoints.
wait_interval: The minimum number of seconds to wait before checking for a
@@ -906,7 +867,7 @@ def eval_continuously(
tf.logging.warning(
'Forced number of epochs for all eval validations to be 1.')
configs = merge_external_params_with_configs(
- configs, hparams, kwargs_dict=kwargs)
+ configs, None, kwargs_dict=kwargs)
model_config = configs['model']
train_input_config = configs['train_input_config']
eval_config = configs['eval_config']
@@ -938,12 +899,6 @@ def eval_continuously(
model=detection_model)
eval_inputs.append((eval_input_config.name, next_eval_input))
- # Read export_to_tpu from hparams if not passed.
- if export_to_tpu is None:
- export_to_tpu = hparams.get('export_to_tpu', False)
- tf.logging.info('eval_continuously: use_tpu %s, export_to_tpu %s',
- use_tpu, export_to_tpu)
-
global_step = tf.compat.v2.Variable(
0, trainable=False, dtype=tf.compat.v2.dtypes.int64)
@@ -956,7 +911,7 @@ def eval_continuously(
for eval_name, eval_input in eval_inputs:
summary_writer = tf.compat.v2.summary.create_file_writer(
- model_dir + '/eval' + eval_name)
+ os.path.join(model_dir, 'eval', eval_name))
with summary_writer.as_default():
eager_eval_loop(
detection_model,
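Note: `is_object_based_checkpoint` and the new restore path in `load_fine_tune_checkpoint` above rely on object-based (V2) checkpoints. The following is a rough sketch of how the check and the restore interact, using an illustrative toy module; the names and the checkpoint path below are examples, not from the patch:

    import tensorflow.compat.v2 as tf

    class ToyModel(tf.Module):

      def __init__(self):
        self.weight = tf.Variable(42.0)

    # Save an object-based (V2) checkpoint; the prefix path is just an example.
    model = ToyModel()
    ckpt_path = tf.train.Checkpoint(model=model).save('/tmp/toy_ckpt/ckpt')

    # Object-based checkpoints carry the trackable object graph, which is what
    # is_object_based_checkpoint() looks for.
    var_names = [name for name, _ in tf.train.list_variables(ckpt_path)]
    assert '_CHECKPOINTABLE_OBJECT_GRAPH' in var_names

    # Restoring mirrors the new load_fine_tune_checkpoint: wrap the restore map
    # in a tf.train.Checkpoint and assert that existing objects were matched.
    restored = ToyModel()
    status = tf.train.Checkpoint(model=restored).restore(ckpt_path)
    status.assert_existing_objects_matched()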
diff --git a/research/object_detection/model_main_tf2.py b/research/object_detection/model_main_tf2.py
new file mode 100644
index 0000000000000000000000000000000000000000..715dc798cf7c15317ae316574b46b3fed9d1e614
--- /dev/null
+++ b/research/object_detection/model_main_tf2.py
@@ -0,0 +1,99 @@
+# Lint as: python3
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+r"""Creates and runs TF2 object detection models.
+
+For local training/evaluation run:
+PIPELINE_CONFIG_PATH=path/to/pipeline.config
+MODEL_DIR=/tmp/model_outputs
+NUM_TRAIN_STEPS=10000
+SAMPLE_1_OF_N_EVAL_EXAMPLES=1
+python model_main_tf2.py -- \
+ --model_dir=$MODEL_DIR --num_train_steps=$NUM_TRAIN_STEPS \
+ --sample_1_of_n_eval_examples=$SAMPLE_1_OF_N_EVAL_EXAMPLES \
+ --pipeline_config_path=$PIPELINE_CONFIG_PATH \
+ --alsologtostderr
+"""
+from absl import flags
+import tensorflow.compat.v2 as tf
+from object_detection import model_lib_v2
+
+flags.DEFINE_string('pipeline_config_path', None, 'Path to pipeline config '
+ 'file.')
+flags.DEFINE_integer('num_train_steps', None, 'Number of train steps.')
+flags.DEFINE_bool('eval_on_train_data', False, 'Enable evaluating on train '
+ 'data (only supported in distributed training).')
+flags.DEFINE_integer('sample_1_of_n_eval_examples', None, 'Will sample one of '
+ 'every n eval input examples, where n is provided.')
+flags.DEFINE_integer('sample_1_of_n_eval_on_train_examples', 5, 'Will sample '
+ 'one of every n train input examples for evaluation, '
+ 'where n is provided. This is only used if '
+ '`eval_training_data` is True.')
+flags.DEFINE_string(
+ 'model_dir', None, 'Path to output model directory '
+ 'where event and checkpoint files will be written.')
+flags.DEFINE_string(
+ 'checkpoint_dir', None, 'Path to directory holding a checkpoint. If '
+ '`checkpoint_dir` is provided, this binary operates in eval-only mode, '
+ 'writing resulting metrics to `model_dir`.')
+
+flags.DEFINE_integer('eval_timeout', 3600, 'Number of seconds to wait for an '
+                     'evaluation checkpoint before exiting.')
+
+flags.DEFINE_bool('use_tpu', False, 'Whether the job is executing on a TPU.')
+flags.DEFINE_integer(
+ 'num_workers', 1, 'When num_workers > 1, training uses '
+ 'MultiWorkerMirroredStrategy. When num_workers = 1 it uses '
+ 'MirroredStrategy.')
+
+FLAGS = flags.FLAGS
+
+
+def main(unused_argv):
+ flags.mark_flag_as_required('model_dir')
+ flags.mark_flag_as_required('pipeline_config_path')
+ tf.config.set_soft_device_placement(True)
+
+ if FLAGS.checkpoint_dir:
+ model_lib_v2.eval_continuously(
+ pipeline_config_path=FLAGS.pipeline_config_path,
+ model_dir=FLAGS.model_dir,
+ train_steps=FLAGS.num_train_steps,
+ sample_1_of_n_eval_examples=FLAGS.sample_1_of_n_eval_examples,
+ sample_1_of_n_eval_on_train_examples=(
+ FLAGS.sample_1_of_n_eval_on_train_examples),
+ checkpoint_dir=FLAGS.checkpoint_dir,
+ wait_interval=300, timeout=FLAGS.eval_timeout)
+ else:
+ if FLAGS.use_tpu:
+ resolver = tf.distribute.cluster_resolver.TPUClusterResolver()
+ tf.config.experimental_connect_to_cluster(resolver)
+ tf.tpu.experimental.initialize_tpu_system(resolver)
+ strategy = tf.distribute.experimental.TPUStrategy(resolver)
+ elif FLAGS.num_workers > 1:
+ strategy = tf.distribute.experimental.MultiWorkerMirroredStrategy()
+ else:
+ strategy = tf.compat.v2.distribute.MirroredStrategy()
+
+ with strategy.scope():
+ model_lib_v2.train_loop(
+ pipeline_config_path=FLAGS.pipeline_config_path,
+ model_dir=FLAGS.model_dir,
+ train_steps=FLAGS.num_train_steps,
+ use_tpu=FLAGS.use_tpu)
+
+if __name__ == '__main__':
+ tf.compat.v1.app.run()
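
A minimal sketch of driving the eval-only path programmatically, mirroring the flags-driven call above; the paths are placeholders, and note that `eval_continuously` no longer accepts `hparams` or `export_to_tpu`:

from object_detection import model_lib_v2

# Placeholder paths; in the binary above these come from command-line flags.
model_lib_v2.eval_continuously(
    pipeline_config_path='/tmp/pipeline.config',
    model_dir='/tmp/model_outputs',           # eval summaries are written here
    checkpoint_dir='/tmp/train_checkpoints',  # checkpoints produced by training
    sample_1_of_n_eval_examples=1,
    wait_interval=300,                        # seconds between checkpoint polls
    timeout=3600)                             # stop waiting after this long
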
diff --git a/research/object_detection/models/bidirectional_feature_pyramid_generators.py b/research/object_detection/models/bidirectional_feature_pyramid_generators.py
new file mode 100644
index 0000000000000000000000000000000000000000..b53dc60ef6465c408900800216cbe066e6d18259
--- /dev/null
+++ b/research/object_detection/models/bidirectional_feature_pyramid_generators.py
@@ -0,0 +1,486 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Functions to generate bidirectional feature pyramids based on image features.
+
+Provides bidirectional feature pyramid network (BiFPN) generators that can be
+used to build object detection feature extractors, as proposed by Tan et al.
+See https://arxiv.org/abs/1911.09070 for more details.
+"""
+import collections
+import functools
+from six.moves import range
+from six.moves import zip
+import tensorflow as tf
+
+from object_detection.utils import bifpn_utils
+
+
+def _create_bifpn_input_config(fpn_min_level,
+ fpn_max_level,
+ input_max_level,
+ level_scales=None):
+ """Creates a BiFPN input config for the input levels from a backbone network.
+
+ Args:
+ fpn_min_level: the minimum pyramid level (highest feature map resolution) to
+ use in the BiFPN.
+ fpn_max_level: the maximum pyramid level (lowest feature map resolution) to
+ use in the BiFPN.
+ input_max_level: the maximum pyramid level that will be provided as input to
+ the BiFPN. Accordingly, the BiFPN will compute additional pyramid levels
+ from input_max_level, up to the desired fpn_max_level.
+ level_scales: a list of pyramid level scale factors. If 'None', each level's
+ scale is set to 2^level by default, which corresponds to each successive
+ feature map scaling by a factor of 2.
+
+ Returns:
+ A list of dictionaries for each feature map expected as input to the BiFPN,
+ where each has entries for the feature map 'name' and 'scale'.
+ """
+ if not level_scales:
+ level_scales = [2**i for i in range(fpn_min_level, fpn_max_level + 1)]
+
+ bifpn_input_params = []
+ for i in range(fpn_min_level, min(fpn_max_level, input_max_level) + 1):
+ bifpn_input_params.append({
+ 'name': '0_up_lvl_{}'.format(i),
+ 'scale': level_scales[i - fpn_min_level]
+ })
+
+ return bifpn_input_params
+
+
+def _get_bifpn_output_node_names(fpn_min_level, fpn_max_level, node_config):
+ """Returns a list of BiFPN output node names, given a BiFPN node config.
+
+ Args:
+ fpn_min_level: the minimum pyramid level (highest feature map resolution)
+ used by the BiFPN.
+ fpn_max_level: the maximum pyramid level (lowest feature map resolution)
+ used by the BiFPN.
+ node_config: the BiFPN node_config, a list of dictionaries corresponding to
+ each node in the BiFPN computation graph, where each entry should have an
+ associated 'name'.
+
+ Returns:
+ A list of strings corresponding to the names of the output BiFPN nodes.
+ """
+ num_output_nodes = fpn_max_level - fpn_min_level + 1
+ return [node['name'] for node in node_config[-num_output_nodes:]]
+
+
+def _create_bifpn_node_config(bifpn_num_iterations,
+ bifpn_num_filters,
+ fpn_min_level,
+ fpn_max_level,
+ input_max_level,
+ bifpn_node_params=None,
+ level_scales=None):
+ """Creates a config specifying a bidirectional feature pyramid network.
+
+ Args:
+ bifpn_num_iterations: the number of top-down bottom-up feature computations
+ to repeat in the BiFPN.
+ bifpn_num_filters: the number of filters (channels) for every feature map
+ used in the BiFPN.
+ fpn_min_level: the minimum pyramid level (highest feature map resolution) to
+ use in the BiFPN.
+ fpn_max_level: the maximum pyramid level (lowest feature map resolution) to
+ use in the BiFPN.
+ input_max_level: the maximum pyramid level that will be provided as input to
+ the BiFPN. Accordingly, the BiFPN will compute additional pyramid levels
+ from input_max_level, up to the desired fpn_max_level.
+ bifpn_node_params: If not 'None', a dictionary of additional default BiFPN
+ node parameters that will be applied to all BiFPN nodes.
+ level_scales: a list of pyramid level scale factors. If 'None', each level's
+ scale is set to 2^level by default, which corresponds to each successive
+ feature map scaling by a factor of 2.
+
+ Returns:
+ A list of dictionaries used to define nodes in the BiFPN computation graph,
+ as proposed by EfficientDet, Tan et al (https://arxiv.org/abs/1911.09070).
+ Each node's entry has the corresponding keys:
+ name: String. The name of this node in the BiFPN. The node name follows
+ the format '{bifpn_iteration}_{dn|up}_lvl_{pyramid_level}', where 'dn'
+ or 'up' refers to whether the node is in the top-down or bottom-up
+ portion of a single BiFPN iteration.
+ scale: the scale factor for this node, by default 2^level.
+ inputs: A list of names of nodes which are inputs to this node.
+ num_channels: The number of channels for this node.
+ combine_method: String. Name of the method used to combine input
+ node feature maps, 'fast_attention' by default for nodes which have more
+ than one input. Otherwise, 'None' for nodes with only one input node.
+ input_op: A (partial) function which is called to construct the layers
+ that will be applied to this BiFPN node's inputs. This function is
+ called with the arguments:
+ input_op(name, input_scale, input_num_channels, output_scale,
+ output_num_channels, conv_hyperparams, is_training,
+ freeze_batchnorm)
+ post_combine_op: A (partial) function which is called to construct the
+ layers that will be applied to the result of the combine operation for
+ this BiFPN node. This function will be called with the arguments:
+ post_combine_op(name, conv_hyperparams, is_training, freeze_batchnorm)
+ If 'None', then no layers will be applied after the combine operation
+ for this node.
+ """
+ if not level_scales:
+ level_scales = [2**i for i in range(fpn_min_level, fpn_max_level + 1)]
+
+ default_node_params = {
+ 'num_channels':
+ bifpn_num_filters,
+ 'combine_method':
+ 'fast_attention',
+ 'input_op':
+ functools.partial(
+ _create_bifpn_resample_block, downsample_method='max_pooling'),
+ 'post_combine_op':
+ functools.partial(
+ bifpn_utils.create_conv_block,
+ num_filters=bifpn_num_filters,
+ kernel_size=3,
+ strides=1,
+ padding='SAME',
+ use_separable=True,
+ apply_batchnorm=True,
+ apply_activation=True,
+ conv_bn_act_pattern=False),
+ }
+ if bifpn_node_params:
+ default_node_params.update(bifpn_node_params)
+
+ bifpn_node_params = []
+ # Create additional base pyramid levels not provided as input to the BiFPN.
+ # Note, combine_method and post_combine_op are set to None for additional
+ # base pyramid levels because they do not combine multiple input BiFPN nodes.
+ for i in range(input_max_level + 1, fpn_max_level + 1):
+ node_params = dict(default_node_params)
+ node_params.update({
+ 'name': '0_up_lvl_{}'.format(i),
+ 'scale': level_scales[i - fpn_min_level],
+ 'inputs': ['0_up_lvl_{}'.format(i - 1)],
+ 'combine_method': None,
+ 'post_combine_op': None,
+ })
+ bifpn_node_params.append(node_params)
+
+ for i in range(bifpn_num_iterations):
+ # The first bottom-up feature pyramid (which includes the input pyramid
+ # levels from the backbone network and the additional base pyramid levels)
+ # is indexed at 0. So, the first top-down bottom-up pass of the BiFPN is
+ # indexed from 1, and repeated for bifpn_num_iterations iterations.
+ bifpn_i = i + 1
+
+ # Create top-down nodes.
+ for level_i in reversed(range(fpn_min_level, fpn_max_level)):
+ inputs = []
+ # BiFPN nodes in the top-down pass receive input from the corresponding
+ # level from the previous BiFPN iteration's bottom-up pass, except for the
+ # bottom-most (min) level node, which is computed once in the initial
+ # bottom-up pass, and is afterwards only computed in each top-down pass.
+ if level_i > fpn_min_level or bifpn_i == 1:
+ inputs.append('{}_up_lvl_{}'.format(bifpn_i - 1, level_i))
+ else:
+ inputs.append('{}_dn_lvl_{}'.format(bifpn_i - 1, level_i))
+ inputs.append(bifpn_node_params[-1]['name'])
+ node_params = dict(default_node_params)
+ node_params.update({
+ 'name': '{}_dn_lvl_{}'.format(bifpn_i, level_i),
+ 'scale': level_scales[level_i - fpn_min_level],
+ 'inputs': inputs
+ })
+ bifpn_node_params.append(node_params)
+
+ # Create bottom-up nodes.
+ for level_i in range(fpn_min_level + 1, fpn_max_level + 1):
+ # BiFPN nodes in the bottom-up pass receive input from the corresponding
+ # level from the preceding top-down pass, except for the top (max) level
+ # which does not have a corresponding node in the top-down pass.
+ inputs = ['{}_up_lvl_{}'.format(bifpn_i - 1, level_i)]
+ if level_i < fpn_max_level:
+ inputs.append('{}_dn_lvl_{}'.format(bifpn_i, level_i))
+ inputs.append(bifpn_node_params[-1]['name'])
+ node_params = dict(default_node_params)
+ node_params.update({
+ 'name': '{}_up_lvl_{}'.format(bifpn_i, level_i),
+ 'scale': level_scales[level_i - fpn_min_level],
+ 'inputs': inputs
+ })
+ bifpn_node_params.append(node_params)
+
+ return bifpn_node_params
+
+
+def _create_bifpn_resample_block(name,
+ input_scale,
+ input_num_channels,
+ output_scale,
+ output_num_channels,
+ conv_hyperparams,
+ is_training,
+ freeze_batchnorm,
+ downsample_method=None,
+ use_native_resize_op=False,
+ maybe_apply_1x1_conv=True,
+ apply_1x1_pre_sampling=True,
+ apply_1x1_post_sampling=False):
+ """Creates resample block layers for input feature maps to BiFPN nodes.
+
+ Args:
+ name: String. Name used for this block of layers.
+ input_scale: Scale factor of the input feature map.
+ input_num_channels: Number of channels in the input feature map.
+ output_scale: Scale factor of the output feature map.
+ output_num_channels: Number of channels in the output feature map.
+ conv_hyperparams: A `hyperparams_builder.KerasLayerHyperparams` object
+ containing hyperparameters for convolution ops.
+ is_training: Indicates whether the feature generator is in training mode.
+ freeze_batchnorm: Bool. Whether to freeze batch norm parameters during
+ training or not. When training with a small batch size (e.g. 1), it is
+ desirable to freeze batch norm update and use pretrained batch norm
+ params.
+ downsample_method: String. Method to use when downsampling feature maps.
+    use_native_resize_op: Bool. Whether to use the native resize op when
+      upsampling feature maps.
+ maybe_apply_1x1_conv: Bool. If 'True', a 1x1 convolution will only be
+ applied if the input_num_channels differs from the output_num_channels.
+ apply_1x1_pre_sampling: Bool. Whether a 1x1 convolution will be applied to
+ the input feature map before the up/down-sampling operation.
+ apply_1x1_post_sampling: Bool. Whether a 1x1 convolution will be applied to
+ the input feature map after the up/down-sampling operation.
+
+ Returns:
+ A list of layers which may be applied to the input feature maps in order to
+ compute feature maps with the specified scale and number of channels.
+ """
+ # By default, 1x1 convolutions are only applied before sampling when the
+ # number of input and output channels differ.
+ if maybe_apply_1x1_conv and output_num_channels == input_num_channels:
+ apply_1x1_pre_sampling = False
+ apply_1x1_post_sampling = False
+
+ apply_bn_for_resampling = True
+ layers = []
+ if apply_1x1_pre_sampling:
+ layers.extend(
+ bifpn_utils.create_conv_block(
+ name=name + '1x1_pre_sample/',
+ num_filters=output_num_channels,
+ kernel_size=1,
+ strides=1,
+ padding='SAME',
+ use_separable=False,
+ apply_batchnorm=apply_bn_for_resampling,
+ apply_activation=False,
+ conv_hyperparams=conv_hyperparams,
+ is_training=is_training,
+ freeze_batchnorm=freeze_batchnorm))
+
+ layers.extend(
+ bifpn_utils.create_resample_feature_map_ops(input_scale, output_scale,
+ downsample_method,
+ use_native_resize_op,
+ conv_hyperparams, is_training,
+ freeze_batchnorm, name))
+
+ if apply_1x1_post_sampling:
+ layers.extend(
+ bifpn_utils.create_conv_block(
+ name=name + '1x1_post_sample/',
+ num_filters=output_num_channels,
+ kernel_size=1,
+ strides=1,
+ padding='SAME',
+ use_separable=False,
+ apply_batchnorm=apply_bn_for_resampling,
+ apply_activation=False,
+ conv_hyperparams=conv_hyperparams,
+ is_training=is_training,
+ freeze_batchnorm=freeze_batchnorm))
+
+ return layers
+
+
+def _create_bifpn_combine_op(num_inputs, name, combine_method):
+  """Creates a combine operation for a BiFPN node's input feature maps.
+
+ Args:
+ num_inputs: The number of inputs to this combine operation.
+ name: String. The name of this combine operation.
+ combine_method: String. The method used to combine input feature maps.
+
+ Returns:
+ A function which may be called with a list of num_inputs feature maps
+ and which will return a single feature map.
+ """
+
+ combine_op = None
+ if num_inputs < 1:
+ raise ValueError('Expected at least 1 input for BiFPN combine.')
+ elif num_inputs == 1:
+ combine_op = lambda x: x[0]
+ else:
+ combine_op = bifpn_utils.BiFPNCombineLayer(
+ combine_method=combine_method, name=name)
+ return combine_op
+
+
+class KerasBiFpnFeatureMaps(tf.keras.Model):
+ """Generates Keras based BiFPN feature maps from an input feature map pyramid.
+
+ A Keras model that generates multi-scale feature maps for detection by
+ iteratively computing top-down and bottom-up feature pyramids, as in the
+ EfficientDet paper by Tan et al, see arxiv.org/abs/1911.09070 for details.
+ """
+
+ def __init__(self,
+ bifpn_num_iterations,
+ bifpn_num_filters,
+ fpn_min_level,
+ fpn_max_level,
+ input_max_level,
+ is_training,
+ conv_hyperparams,
+ freeze_batchnorm,
+ bifpn_node_params=None,
+ name=None):
+ """Constructor.
+
+ Args:
+ bifpn_num_iterations: The number of top-down bottom-up iterations.
+ bifpn_num_filters: The number of filters (channels) to be used for all
+ feature maps in this BiFPN.
+ fpn_min_level: The minimum pyramid level (highest feature map resolution)
+ to use in the BiFPN.
+ fpn_max_level: The maximum pyramid level (lowest feature map resolution)
+ to use in the BiFPN.
+ input_max_level: The maximum pyramid level that will be provided as input
+ to the BiFPN. Accordingly, the BiFPN will compute any additional pyramid
+ levels from input_max_level up to the desired fpn_max_level, with each
+        successive level downsampling by a scale factor of 2 by default.
+ is_training: Indicates whether the feature generator is in training mode.
+ conv_hyperparams: A `hyperparams_builder.KerasLayerHyperparams` object
+ containing hyperparameters for convolution ops.
+ freeze_batchnorm: Bool. Whether to freeze batch norm parameters during
+ training or not. When training with a small batch size (e.g. 1), it is
+ desirable to freeze batch norm update and use pretrained batch norm
+ params.
+ bifpn_node_params: An optional dictionary that may be used to specify
+ default parameters for BiFPN nodes, without the need to provide a custom
+ bifpn_node_config. For example, if '{ combine_method: 'sum' }', then all
+ BiFPN nodes will combine input feature maps by summation, rather than
+ by the default fast attention method.
+ name: A string name scope to assign to the model. If 'None', Keras
+ will auto-generate one from the class name.
+ """
+ super(KerasBiFpnFeatureMaps, self).__init__(name=name)
+ bifpn_node_config = _create_bifpn_node_config(
+ bifpn_num_iterations, bifpn_num_filters, fpn_min_level, fpn_max_level,
+ input_max_level, bifpn_node_params)
+ bifpn_input_config = _create_bifpn_input_config(
+ fpn_min_level, fpn_max_level, input_max_level)
+ bifpn_output_node_names = _get_bifpn_output_node_names(
+ fpn_min_level, fpn_max_level, bifpn_node_config)
+
+ self.bifpn_node_config = bifpn_node_config
+ self.bifpn_output_node_names = bifpn_output_node_names
+ self.node_input_blocks = []
+ self.node_combine_op = []
+ self.node_post_combine_block = []
+
+ all_node_params = bifpn_input_config
+ all_node_names = [node['name'] for node in all_node_params]
+ for node_config in bifpn_node_config:
+ # Maybe transform and/or resample input feature maps.
+ input_blocks = []
+ for input_name in node_config['inputs']:
+ if input_name not in all_node_names:
+ raise ValueError(
+ 'Input feature map ({}) does not exist:'.format(input_name))
+ input_index = all_node_names.index(input_name)
+ input_params = all_node_params[input_index]
+ input_block = node_config['input_op'](
+ name='{}/input_{}/'.format(node_config['name'], input_name),
+ input_scale=input_params['scale'],
+ input_num_channels=input_params.get('num_channels', None),
+ output_scale=node_config['scale'],
+ output_num_channels=node_config['num_channels'],
+ conv_hyperparams=conv_hyperparams,
+ is_training=is_training,
+ freeze_batchnorm=freeze_batchnorm)
+ input_blocks.append((input_index, input_block))
+
+ # Combine input feature maps.
+ combine_op = _create_bifpn_combine_op(
+ num_inputs=len(input_blocks),
+ name=(node_config['name'] + '/combine'),
+ combine_method=node_config['combine_method'])
+
+ # Post-combine layers.
+ post_combine_block = []
+ if node_config['post_combine_op']:
+ post_combine_block.extend(node_config['post_combine_op'](
+ name=node_config['name'] + '/post_combine/',
+ conv_hyperparams=conv_hyperparams,
+ is_training=is_training,
+ freeze_batchnorm=freeze_batchnorm))
+
+ self.node_input_blocks.append(input_blocks)
+ self.node_combine_op.append(combine_op)
+ self.node_post_combine_block.append(post_combine_block)
+ all_node_params.append(node_config)
+ all_node_names.append(node_config['name'])
+
+ def call(self, feature_pyramid):
+ """Compute BiFPN feature maps from input feature pyramid.
+
+ Executed when calling the `.__call__` method on input.
+
+ Args:
+ feature_pyramid: list of tuples of (tensor_name, image_feature_tensor).
+
+ Returns:
+ feature_maps: an OrderedDict mapping keys (feature map names) to
+ tensors where each tensor has shape [batch, height_i, width_i, depth_i].
+ """
+ feature_maps = [el[1] for el in feature_pyramid]
+ output_feature_maps = [None for node in self.bifpn_output_node_names]
+
+ for index, node in enumerate(self.bifpn_node_config):
+ node_scope = 'node_{:02d}'.format(index)
+ with tf.name_scope(node_scope):
+ # Apply layer blocks to this node's input feature maps.
+ input_block_results = []
+ for input_index, input_block in self.node_input_blocks[index]:
+ block_result = feature_maps[input_index]
+ for layer in input_block:
+ block_result = layer(block_result)
+ input_block_results.append(block_result)
+
+ # Combine the resulting feature maps.
+ node_result = self.node_combine_op[index](input_block_results)
+
+ # Apply post-combine layer block if applicable.
+ for layer in self.node_post_combine_block[index]:
+ node_result = layer(node_result)
+
+ feature_maps.append(node_result)
+
+ if node['name'] in self.bifpn_output_node_names:
+ index = self.bifpn_output_node_names.index(node['name'])
+ output_feature_maps[index] = node_result
+
+ return collections.OrderedDict(
+ zip(self.bifpn_output_node_names, output_feature_maps))
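
A brief usage sketch of the new generator, assuming conv hyperparameters built from a text proto exactly as in the test below; with 3 iterations and levels 3-7, the output keys follow the '{iteration}_{dn|up}_lvl_{level}' naming convention described above:

import tensorflow as tf
from google.protobuf import text_format
from object_detection.builders import hyperparams_builder
from object_detection.models import bidirectional_feature_pyramid_generators as bifpn_generators
from object_detection.protos import hyperparams_pb2

# Build Keras layer hyperparameters from a text proto (same as the test below).
hyperparams_proto = hyperparams_pb2.Hyperparams()
text_format.Merge("""
  regularizer { l2_regularizer { } }
  initializer { truncated_normal_initializer { } }
  force_use_bias: true
""", hyperparams_proto)
conv_hyperparams = hyperparams_builder.KerasLayerHyperparams(hyperparams_proto)

bifpn = bifpn_generators.KerasBiFpnFeatureMaps(
    bifpn_num_iterations=3,
    bifpn_num_filters=64,
    fpn_min_level=3,
    fpn_max_level=7,
    input_max_level=5,
    is_training=False,
    conv_hyperparams=conv_hyperparams,
    freeze_batchnorm=True,
    name='bifpn')

# Backbone features for levels 3-5; levels 6 and 7 are derived inside the BiFPN.
image_features = [
    ('block3', tf.zeros([1, 32, 32, 40])),
    ('block4', tf.zeros([1, 16, 16, 112])),
    ('block5', tf.zeros([1, 8, 8, 320])),
]
feature_maps = bifpn(image_features)
# OrderedDict keyed by '3_dn_lvl_3', '3_up_lvl_4', ..., '3_up_lvl_7', with every
# value shaped [1, h_i, w_i, 64].
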
diff --git a/research/object_detection/models/bidirectional_feature_pyramid_generators_tf2_test.py b/research/object_detection/models/bidirectional_feature_pyramid_generators_tf2_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..cbc815cc446add205a5b307cd56cf81ee60a1041
--- /dev/null
+++ b/research/object_detection/models/bidirectional_feature_pyramid_generators_tf2_test.py
@@ -0,0 +1,167 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+"""Tests for bidirectional feature pyramid generators."""
+import unittest
+from absl.testing import parameterized
+
+import tensorflow.compat.v1 as tf
+
+from google.protobuf import text_format
+
+from object_detection.builders import hyperparams_builder
+from object_detection.models import bidirectional_feature_pyramid_generators as bifpn_generators
+from object_detection.protos import hyperparams_pb2
+from object_detection.utils import test_case
+from object_detection.utils import test_utils
+from object_detection.utils import tf_version
+
+
+@parameterized.parameters({'bifpn_num_iterations': 2},
+ {'bifpn_num_iterations': 8})
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class BiFPNFeaturePyramidGeneratorTest(test_case.TestCase):
+
+ def _build_conv_hyperparams(self):
+ conv_hyperparams = hyperparams_pb2.Hyperparams()
+ conv_hyperparams_text_proto = """
+ regularizer {
+ l2_regularizer {
+ }
+ }
+ initializer {
+ truncated_normal_initializer {
+ }
+ }
+ force_use_bias: true
+ """
+ text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams)
+ return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams)
+
+ def test_get_expected_feature_map_shapes(self, bifpn_num_iterations):
+ with test_utils.GraphContextOrNone() as g:
+ image_features = [
+ ('block3', tf.random_uniform([4, 16, 16, 256], dtype=tf.float32)),
+ ('block4', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
+ ('block5', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32))
+ ]
+ bifpn_generator = bifpn_generators.KerasBiFpnFeatureMaps(
+ bifpn_num_iterations=bifpn_num_iterations,
+ bifpn_num_filters=128,
+ fpn_min_level=3,
+ fpn_max_level=7,
+ input_max_level=5,
+ is_training=True,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False)
+ def graph_fn():
+ feature_maps = bifpn_generator(image_features)
+ return feature_maps
+
+ expected_feature_map_shapes = {
+ '{}_dn_lvl_3'.format(bifpn_num_iterations): (4, 16, 16, 128),
+ '{}_up_lvl_4'.format(bifpn_num_iterations): (4, 8, 8, 128),
+ '{}_up_lvl_5'.format(bifpn_num_iterations): (4, 4, 4, 128),
+ '{}_up_lvl_6'.format(bifpn_num_iterations): (4, 2, 2, 128),
+ '{}_up_lvl_7'.format(bifpn_num_iterations): (4, 1, 1, 128)}
+ out_feature_maps = self.execute(graph_fn, [], g)
+ out_feature_map_shapes = dict(
+ (key, value.shape) for key, value in out_feature_maps.items())
+ self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
+
+ def test_get_expected_variable_names(self, bifpn_num_iterations):
+ with test_utils.GraphContextOrNone() as g:
+ image_features = [
+ ('block3', tf.random_uniform([4, 16, 16, 256], dtype=tf.float32)),
+ ('block4', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
+ ('block5', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32))
+ ]
+ bifpn_generator = bifpn_generators.KerasBiFpnFeatureMaps(
+ bifpn_num_iterations=bifpn_num_iterations,
+ bifpn_num_filters=128,
+ fpn_min_level=3,
+ fpn_max_level=7,
+ input_max_level=5,
+ is_training=True,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False,
+ name='bifpn')
+ def graph_fn():
+ return bifpn_generator(image_features)
+
+ self.execute(graph_fn, [], g)
+ expected_variables = [
+ 'bifpn/node_00/0_up_lvl_6/input_0_up_lvl_5/1x1_pre_sample/conv/bias',
+ 'bifpn/node_00/0_up_lvl_6/input_0_up_lvl_5/1x1_pre_sample/conv/kernel',
+ 'bifpn/node_03/1_dn_lvl_5/input_0_up_lvl_5/1x1_pre_sample/conv/bias',
+ 'bifpn/node_03/1_dn_lvl_5/input_0_up_lvl_5/1x1_pre_sample/conv/kernel',
+ 'bifpn/node_04/1_dn_lvl_4/input_0_up_lvl_4/1x1_pre_sample/conv/bias',
+ 'bifpn/node_04/1_dn_lvl_4/input_0_up_lvl_4/1x1_pre_sample/conv/kernel',
+ 'bifpn/node_05/1_dn_lvl_3/input_0_up_lvl_3/1x1_pre_sample/conv/bias',
+ 'bifpn/node_05/1_dn_lvl_3/input_0_up_lvl_3/1x1_pre_sample/conv/kernel',
+ 'bifpn/node_06/1_up_lvl_4/input_0_up_lvl_4/1x1_pre_sample/conv/bias',
+ 'bifpn/node_06/1_up_lvl_4/input_0_up_lvl_4/1x1_pre_sample/conv/kernel',
+ 'bifpn/node_07/1_up_lvl_5/input_0_up_lvl_5/1x1_pre_sample/conv/bias',
+ 'bifpn/node_07/1_up_lvl_5/input_0_up_lvl_5/1x1_pre_sample/conv/kernel']
+ expected_node_variable_patterns = [
+ ['bifpn/node_{:02}/{}_dn_lvl_6/combine/bifpn_combine_weights',
+ 'bifpn/node_{:02}/{}_dn_lvl_6/post_combine/separable_conv/bias',
+ 'bifpn/node_{:02}/{}_dn_lvl_6/post_combine/separable_conv/depthwise_kernel',
+ 'bifpn/node_{:02}/{}_dn_lvl_6/post_combine/separable_conv/pointwise_kernel'],
+ ['bifpn/node_{:02}/{}_dn_lvl_5/combine/bifpn_combine_weights',
+ 'bifpn/node_{:02}/{}_dn_lvl_5/post_combine/separable_conv/bias',
+ 'bifpn/node_{:02}/{}_dn_lvl_5/post_combine/separable_conv/depthwise_kernel',
+ 'bifpn/node_{:02}/{}_dn_lvl_5/post_combine/separable_conv/pointwise_kernel'],
+ ['bifpn/node_{:02}/{}_dn_lvl_4/combine/bifpn_combine_weights',
+ 'bifpn/node_{:02}/{}_dn_lvl_4/post_combine/separable_conv/bias',
+ 'bifpn/node_{:02}/{}_dn_lvl_4/post_combine/separable_conv/depthwise_kernel',
+ 'bifpn/node_{:02}/{}_dn_lvl_4/post_combine/separable_conv/pointwise_kernel'],
+ ['bifpn/node_{:02}/{}_dn_lvl_3/combine/bifpn_combine_weights',
+ 'bifpn/node_{:02}/{}_dn_lvl_3/post_combine/separable_conv/bias',
+ 'bifpn/node_{:02}/{}_dn_lvl_3/post_combine/separable_conv/depthwise_kernel',
+ 'bifpn/node_{:02}/{}_dn_lvl_3/post_combine/separable_conv/pointwise_kernel'],
+ ['bifpn/node_{:02}/{}_up_lvl_4/combine/bifpn_combine_weights',
+ 'bifpn/node_{:02}/{}_up_lvl_4/post_combine/separable_conv/bias',
+ 'bifpn/node_{:02}/{}_up_lvl_4/post_combine/separable_conv/depthwise_kernel',
+ 'bifpn/node_{:02}/{}_up_lvl_4/post_combine/separable_conv/pointwise_kernel'],
+ ['bifpn/node_{:02}/{}_up_lvl_5/combine/bifpn_combine_weights',
+ 'bifpn/node_{:02}/{}_up_lvl_5/post_combine/separable_conv/bias',
+ 'bifpn/node_{:02}/{}_up_lvl_5/post_combine/separable_conv/depthwise_kernel',
+ 'bifpn/node_{:02}/{}_up_lvl_5/post_combine/separable_conv/pointwise_kernel'],
+ ['bifpn/node_{:02}/{}_up_lvl_6/combine/bifpn_combine_weights',
+ 'bifpn/node_{:02}/{}_up_lvl_6/post_combine/separable_conv/bias',
+ 'bifpn/node_{:02}/{}_up_lvl_6/post_combine/separable_conv/depthwise_kernel',
+ 'bifpn/node_{:02}/{}_up_lvl_6/post_combine/separable_conv/pointwise_kernel'],
+ ['bifpn/node_{:02}/{}_up_lvl_7/combine/bifpn_combine_weights',
+ 'bifpn/node_{:02}/{}_up_lvl_7/post_combine/separable_conv/bias',
+ 'bifpn/node_{:02}/{}_up_lvl_7/post_combine/separable_conv/depthwise_kernel',
+ 'bifpn/node_{:02}/{}_up_lvl_7/post_combine/separable_conv/pointwise_kernel']]
+
+ node_i = 2
+ for iter_i in range(1, bifpn_num_iterations+1):
+ for node_variable_patterns in expected_node_variable_patterns:
+ for pattern in node_variable_patterns:
+ expected_variables.append(pattern.format(node_i, iter_i))
+ node_i += 1
+
+ expected_variables = set(expected_variables)
+ actual_variable_set = set(
+ [var.name.split(':')[0] for var in bifpn_generator.variables])
+ self.assertSetEqual(expected_variables, actual_variable_set)
+
+# TODO(aom): Tests for create_bifpn_combine_op.
+
+if __name__ == '__main__':
+ tf.test.main()
diff --git a/research/object_detection/models/center_net_hourglass_feature_extractor.py b/research/object_detection/models/center_net_hourglass_feature_extractor.py
new file mode 100644
index 0000000000000000000000000000000000000000..4761915aa5ad0023673199f2083ff355816f7bb1
--- /dev/null
+++ b/research/object_detection/models/center_net_hourglass_feature_extractor.py
@@ -0,0 +1,75 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Hourglass[1] feature extractor for CenterNet[2] meta architecture.
+
+[1]: https://arxiv.org/abs/1603.06937
+[2]: https://arxiv.org/abs/1904.07850
+"""
+
+from object_detection.meta_architectures import center_net_meta_arch
+from object_detection.models.keras_models import hourglass_network
+
+
+class CenterNetHourglassFeatureExtractor(
+ center_net_meta_arch.CenterNetFeatureExtractor):
+ """The hourglass feature extractor for CenterNet.
+
+ This class is a thin wrapper around the HourglassFeatureExtractor class
+ along with some preprocessing methods inherited from the base class.
+ """
+
+ def __init__(self, hourglass_net, channel_means=(0., 0., 0.),
+ channel_stds=(1., 1., 1.), bgr_ordering=False):
+    """Initializes the feature extractor.
+
+ Args:
+ hourglass_net: The underlying hourglass network to use.
+ channel_means: A tuple of floats, denoting the mean of each channel
+ which will be subtracted from it.
+ channel_stds: A tuple of floats, denoting the standard deviation of each
+ channel. Each channel will be divided by its standard deviation value.
+ bgr_ordering: bool, if set will change the channel ordering to be in the
+        [blue, green, red] order.
+ """
+
+ super(CenterNetHourglassFeatureExtractor, self).__init__(
+ channel_means=channel_means, channel_stds=channel_stds,
+ bgr_ordering=bgr_ordering)
+ self._network = hourglass_net
+
+ def call(self, inputs):
+ return self._network(inputs)
+
+ @property
+ def out_stride(self):
+ """The stride in the output image of the network."""
+ return 4
+
+ @property
+ def num_feature_outputs(self):
+    """The number of feature outputs returned by the feature extractor."""
+ return self._network.num_hourglasses
+
+ def get_model(self):
+ return self._network
+
+
+def hourglass_104(channel_means, channel_stds, bgr_ordering):
+ """The Hourglass-104 backbone for CenterNet."""
+
+ network = hourglass_network.hourglass_104()
+ return CenterNetHourglassFeatureExtractor(
+ network, channel_means=channel_means, channel_stds=channel_stds,
+ bgr_ordering=bgr_ordering)
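
A short usage sketch; the tiny channel dimensions mirror the accompanying test rather than the full hourglass-104 configuration:

import tensorflow as tf
from object_detection.models import center_net_hourglass_feature_extractor as hourglass
from object_detection.models.keras_models import hourglass_network

net = hourglass_network.HourglassNetwork(
    num_stages=4, blocks_per_stage=[2, 3, 4, 5, 6],
    channel_dims=[4, 6, 8, 10, 12, 14], num_hourglasses=2)
extractor = hourglass.CenterNetHourglassFeatureExtractor(net)

images = tf.zeros((2, 64, 64, 3), dtype=tf.float32)
# num_feature_outputs == num_hourglasses == 2; with out_stride 4, each of the
# two returned feature maps has shape (2, 16, 16, 6) here.
features = extractor(images)
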
diff --git a/research/object_detection/models/center_net_hourglass_feature_extractor_tf2_test.py b/research/object_detection/models/center_net_hourglass_feature_extractor_tf2_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..19d5cbe9843ff03d6d1499a02980a067dc305579
--- /dev/null
+++ b/research/object_detection/models/center_net_hourglass_feature_extractor_tf2_test.py
@@ -0,0 +1,44 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Testing hourglass feature extractor for CenterNet."""
+import unittest
+import numpy as np
+import tensorflow.compat.v1 as tf
+
+from object_detection.models import center_net_hourglass_feature_extractor as hourglass
+from object_detection.models.keras_models import hourglass_network
+from object_detection.utils import test_case
+from object_detection.utils import tf_version
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class CenterNetHourglassFeatureExtractorTest(test_case.TestCase):
+
+ def test_center_net_hourglass_feature_extractor(self):
+
+ net = hourglass_network.HourglassNetwork(
+ num_stages=4, blocks_per_stage=[2, 3, 4, 5, 6],
+ channel_dims=[4, 6, 8, 10, 12, 14], num_hourglasses=2)
+
+ model = hourglass.CenterNetHourglassFeatureExtractor(net)
+ def graph_fn():
+ return model(tf.zeros((2, 64, 64, 3), dtype=np.float32))
+ outputs = self.execute(graph_fn, [])
+ self.assertEqual(outputs[0].shape, (2, 16, 16, 6))
+ self.assertEqual(outputs[1].shape, (2, 16, 16, 6))
+
+
+if __name__ == '__main__':
+ tf.test.main()
diff --git a/research/object_detection/models/center_net_resnet_feature_extractor.py b/research/object_detection/models/center_net_resnet_feature_extractor.py
new file mode 100644
index 0000000000000000000000000000000000000000..477fa4c50ea9e0bc62b43a75c1674acfef7a183c
--- /dev/null
+++ b/research/object_detection/models/center_net_resnet_feature_extractor.py
@@ -0,0 +1,149 @@
+# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Resnetv2 based feature extractors for CenterNet[1] meta architecture.
+
+[1]: https://arxiv.org/abs/1904.07850
+"""
+
+
+import tensorflow.compat.v1 as tf
+
+from object_detection.meta_architectures.center_net_meta_arch import CenterNetFeatureExtractor
+
+
+class CenterNetResnetFeatureExtractor(CenterNetFeatureExtractor):
+ """Resnet v2 base feature extractor for the CenterNet model."""
+
+ def __init__(self, resnet_type, channel_means=(0., 0., 0.),
+ channel_stds=(1., 1., 1.), bgr_ordering=False):
+ """Initializes the feature extractor with a specific ResNet architecture.
+
+ Args:
+ resnet_type: A string specifying which kind of ResNet to use. Currently
+ only `resnet_v2_50` and `resnet_v2_101` are supported.
+ channel_means: A tuple of floats, denoting the mean of each channel
+ which will be subtracted from it.
+ channel_stds: A tuple of floats, denoting the standard deviation of each
+ channel. Each channel will be divided by its standard deviation value.
+ bgr_ordering: bool, if set will change the channel ordering to be in the
+        [blue, green, red] order.
+
+ """
+
+ super(CenterNetResnetFeatureExtractor, self).__init__(
+ channel_means=channel_means, channel_stds=channel_stds,
+ bgr_ordering=bgr_ordering)
+ if resnet_type == 'resnet_v2_101':
+ self._base_model = tf.keras.applications.ResNet101V2(weights=None)
+ output_layer = 'conv5_block3_out'
+ elif resnet_type == 'resnet_v2_50':
+ self._base_model = tf.keras.applications.ResNet50V2(weights=None)
+ output_layer = 'conv5_block3_out'
+ else:
+ raise ValueError('Unknown Resnet Model {}'.format(resnet_type))
+ output_layer = self._base_model.get_layer(output_layer)
+
+ self._resnet_model = tf.keras.models.Model(inputs=self._base_model.input,
+ outputs=output_layer.output)
+ resnet_output = self._resnet_model(self._base_model.input)
+
+ for num_filters in [256, 128, 64]:
+ # TODO(vighneshb) This section has a few differences from the paper
+ # Figure out how much of a performance impact they have.
+
+ # 1. We use a simple convolution instead of a deformable convolution
+ conv = tf.keras.layers.Conv2D(filters=num_filters, kernel_size=3,
+ strides=1, padding='same')
+ resnet_output = conv(resnet_output)
+ resnet_output = tf.keras.layers.BatchNormalization()(resnet_output)
+ resnet_output = tf.keras.layers.ReLU()(resnet_output)
+
+ # 2. We use the default initialization for the convolution layers
+ # instead of initializing it to do bilinear upsampling.
+ conv_transpose = tf.keras.layers.Conv2DTranspose(filters=num_filters,
+ kernel_size=3, strides=2,
+ padding='same')
+ resnet_output = conv_transpose(resnet_output)
+ resnet_output = tf.keras.layers.BatchNormalization()(resnet_output)
+ resnet_output = tf.keras.layers.ReLU()(resnet_output)
+
+ self._feature_extractor_model = tf.keras.models.Model(
+ inputs=self._base_model.input, outputs=resnet_output)
+
+ def preprocess(self, resized_inputs):
+ """Preprocess input images for the ResNet model.
+
+ This scales images in the range [0, 255] to the range [-1, 1]
+
+ Args:
+ resized_inputs: a [batch, height, width, channels] float32 tensor.
+
+ Returns:
+ outputs: a [batch, height, width, channels] float32 tensor.
+
+ """
+ resized_inputs = super(CenterNetResnetFeatureExtractor, self).preprocess(
+ resized_inputs)
+ return tf.keras.applications.resnet_v2.preprocess_input(resized_inputs)
+
+ def load_feature_extractor_weights(self, path):
+ self._base_model.load_weights(path)
+
+ def get_base_model(self):
+ """Get base resnet model for inspection and testing."""
+ return self._base_model
+
+ def call(self, inputs):
+ """Returns image features extracted by the backbone.
+
+ Args:
+ inputs: An image tensor of shape [batch_size, input_height,
+ input_width, 3]
+
+ Returns:
+ features_list: A list of length 1 containing a tensor of shape
+ [batch_size, input_height // 4, input_width // 4, 64] containing
+ the features extracted by the ResNet.
+ """
+ return [self._feature_extractor_model(inputs)]
+
+ @property
+ def num_feature_outputs(self):
+ return 1
+
+ @property
+ def out_stride(self):
+ return 4
+
+
+def resnet_v2_101(channel_means, channel_stds, bgr_ordering):
+ """The ResNet v2 101 feature extractor."""
+
+ return CenterNetResnetFeatureExtractor(
+ resnet_type='resnet_v2_101',
+ channel_means=channel_means,
+ channel_stds=channel_stds,
+ bgr_ordering=bgr_ordering
+ )
+
+
+def resnet_v2_50(channel_means, channel_stds, bgr_ordering):
+ """The ResNet v2 50 feature extractor."""
+
+ return CenterNetResnetFeatureExtractor(
+ resnet_type='resnet_v2_50',
+ channel_means=channel_means,
+ channel_stds=channel_stds,
+ bgr_ordering=bgr_ordering)
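
A short usage sketch, following the accompanying test:

import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models import center_net_resnet_feature_extractor

extractor = center_net_resnet_feature_extractor.resnet_v2_50(
    channel_means=(0., 0., 0.), channel_stds=(1., 1., 1.), bgr_ordering=False)

images = tf.constant(np.zeros((8, 224, 224, 3), dtype=np.float32))
processed = extractor.preprocess(images)  # maps [0, 255] pixels to [-1, 1]
features = extractor(processed)           # a list with a single feature map
# With out_stride 4, features[0] has shape (8, 56, 56, 64).
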
diff --git a/research/object_detection/models/center_net_resnet_feature_extractor_tf2_test.py b/research/object_detection/models/center_net_resnet_feature_extractor_tf2_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..3429c0442053982d3d3d9502508ede3177cbf102
--- /dev/null
+++ b/research/object_detection/models/center_net_resnet_feature_extractor_tf2_test.py
@@ -0,0 +1,54 @@
+# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Testing ResNet v2 models for the CenterNet meta architecture."""
+import unittest
+import numpy as np
+import tensorflow.compat.v1 as tf
+
+from object_detection.models import center_net_resnet_feature_extractor
+from object_detection.utils import test_case
+from object_detection.utils import tf_version
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class CenterNetResnetFeatureExtractorTest(test_case.TestCase):
+
+ def test_output_size(self):
+ """Verify that shape of features returned by the backbone is correct."""
+
+ model = center_net_resnet_feature_extractor.\
+ CenterNetResnetFeatureExtractor('resnet_v2_101')
+ def graph_fn():
+ img = np.zeros((8, 224, 224, 3), dtype=np.float32)
+ processed_img = model.preprocess(img)
+ return model(processed_img)
+ outputs = self.execute(graph_fn, [])
+ self.assertEqual(outputs.shape, (8, 56, 56, 64))
+
+ def test_output_size_resnet50(self):
+ """Verify that shape of features returned by the backbone is correct."""
+
+ model = center_net_resnet_feature_extractor.\
+ CenterNetResnetFeatureExtractor('resnet_v2_50')
+ def graph_fn():
+ img = np.zeros((8, 224, 224, 3), dtype=np.float32)
+ processed_img = model.preprocess(img)
+ return model(processed_img)
+ outputs = self.execute(graph_fn, [])
+ self.assertEqual(outputs.shape, (8, 56, 56, 64))
+
+
+if __name__ == '__main__':
+ tf.test.main()
diff --git a/research/object_detection/models/center_net_resnet_v1_fpn_feature_extractor.py b/research/object_detection/models/center_net_resnet_v1_fpn_feature_extractor.py
new file mode 100644
index 0000000000000000000000000000000000000000..842e9cf1b2e5393a6bc87df3989f173d0409de70
--- /dev/null
+++ b/research/object_detection/models/center_net_resnet_v1_fpn_feature_extractor.py
@@ -0,0 +1,176 @@
+# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Resnetv1 FPN [1] based feature extractors for CenterNet[2] meta architecture.
+
+
+[1]: https://arxiv.org/abs/1612.03144.
+[2]: https://arxiv.org/abs/1904.07850.
+"""
+import tensorflow.compat.v1 as tf
+
+from object_detection.meta_architectures.center_net_meta_arch import CenterNetFeatureExtractor
+
+
+_RESNET_MODEL_OUTPUT_LAYERS = {
+ 'resnet_v1_50': ['conv2_block3_out', 'conv3_block4_out',
+ 'conv4_block6_out', 'conv5_block3_out'],
+ 'resnet_v1_101': ['conv2_block3_out', 'conv3_block4_out',
+ 'conv4_block23_out', 'conv5_block3_out'],
+}
+
+
+class CenterNetResnetV1FpnFeatureExtractor(CenterNetFeatureExtractor):
+ """Resnet v1 FPN base feature extractor for the CenterNet model.
+
+ This feature extractor uses residual skip connections and nearest neighbor
+ upsampling to produce an output feature map of stride 4, which has precise
+ localization information along with strong semantic information from the top
+ of the net. This design does not exactly follow the original FPN design,
+ specifically:
+ - Since only one output map is necessary for heatmap prediction (stride 4
+ output), the top-down feature maps can have different numbers of channels.
+ Specifically, the top down feature maps have the following sizes:
+ [h/4, w/4, 64], [h/8, w/8, 128], [h/16, w/16, 256], [h/32, w/32, 256].
+ - No additional coarse features are used after conv5_x.
+ """
+
+ def __init__(self, resnet_type, channel_means=(0., 0., 0.),
+ channel_stds=(1., 1., 1.), bgr_ordering=False):
+ """Initializes the feature extractor with a specific ResNet architecture.
+
+ Args:
+ resnet_type: A string specifying which kind of ResNet to use. Currently
+ only `resnet_v1_50` and `resnet_v1_101` are supported.
+ channel_means: A tuple of floats, denoting the mean of each channel
+ which will be subtracted from it.
+ channel_stds: A tuple of floats, denoting the standard deviation of each
+ channel. Each channel will be divided by its standard deviation value.
+ bgr_ordering: bool, if set will change the channel ordering to be in the
+        [blue, green, red] order.
+
+ """
+
+ super(CenterNetResnetV1FpnFeatureExtractor, self).__init__(
+ channel_means=channel_means, channel_stds=channel_stds,
+ bgr_ordering=bgr_ordering)
+ if resnet_type == 'resnet_v1_50':
+ self._base_model = tf.keras.applications.ResNet50(weights=None)
+ elif resnet_type == 'resnet_v1_101':
+ self._base_model = tf.keras.applications.ResNet101(weights=None)
+ else:
+ raise ValueError('Unknown Resnet Model {}'.format(resnet_type))
+ output_layers = _RESNET_MODEL_OUTPUT_LAYERS[resnet_type]
+ outputs = [self._base_model.get_layer(output_layer_name).output
+ for output_layer_name in output_layers]
+
+ self._resnet_model = tf.keras.models.Model(inputs=self._base_model.input,
+ outputs=outputs)
+ resnet_outputs = self._resnet_model(self._base_model.input)
+
+ # Construct the top-down feature maps.
+ top_layer = resnet_outputs[-1]
+ residual_op = tf.keras.layers.Conv2D(filters=256, kernel_size=1,
+ strides=1, padding='same')
+ top_down = residual_op(top_layer)
+
+ num_filters_list = [256, 128, 64]
+ for i, num_filters in enumerate(num_filters_list):
+ level_ind = 2 - i
+ # Upsample.
+ upsample_op = tf.keras.layers.UpSampling2D(2, interpolation='nearest')
+ top_down = upsample_op(top_down)
+
+ # Residual (skip-connection) from bottom-up pathway.
+ residual_op = tf.keras.layers.Conv2D(filters=num_filters, kernel_size=1,
+ strides=1, padding='same')
+ residual = residual_op(resnet_outputs[level_ind])
+
+ # Merge.
+ top_down = top_down + residual
+ next_num_filters = num_filters_list[i+1] if i + 1 <= 2 else 64
+ conv = tf.keras.layers.Conv2D(filters=next_num_filters,
+ kernel_size=3, strides=1, padding='same')
+ top_down = conv(top_down)
+ top_down = tf.keras.layers.BatchNormalization()(top_down)
+ top_down = tf.keras.layers.ReLU()(top_down)
+
+ self._feature_extractor_model = tf.keras.models.Model(
+ inputs=self._base_model.input, outputs=top_down)
+
+ def preprocess(self, resized_inputs):
+ """Preprocess input images for the ResNet model.
+
+ This scales images in the range [0, 255] to the range [-1, 1]
+
+ Args:
+ resized_inputs: a [batch, height, width, channels] float32 tensor.
+
+ Returns:
+ outputs: a [batch, height, width, channels] float32 tensor.
+
+ """
+ resized_inputs = super(
+ CenterNetResnetV1FpnFeatureExtractor, self).preprocess(resized_inputs)
+ return tf.keras.applications.resnet.preprocess_input(resized_inputs)
+
+ def load_feature_extractor_weights(self, path):
+ self._base_model.load_weights(path)
+
+ def get_base_model(self):
+ """Get base resnet model for inspection and testing."""
+ return self._base_model
+
+ def call(self, inputs):
+ """Returns image features extracted by the backbone.
+
+ Args:
+ inputs: An image tensor of shape [batch_size, input_height,
+ input_width, 3]
+
+ Returns:
+ features_list: A list of length 1 containing a tensor of shape
+ [batch_size, input_height // 4, input_width // 4, 64] containing
+ the features extracted by the ResNet.
+ """
+ return [self._feature_extractor_model(inputs)]
+
+ @property
+ def num_feature_outputs(self):
+ return 1
+
+ @property
+ def out_stride(self):
+ return 4
+
+
+def resnet_v1_101_fpn(channel_means, channel_stds, bgr_ordering):
+ """The ResNet v1 101 FPN feature extractor."""
+
+ return CenterNetResnetV1FpnFeatureExtractor(
+ resnet_type='resnet_v1_101',
+ channel_means=channel_means,
+ channel_stds=channel_stds,
+ bgr_ordering=bgr_ordering
+ )
+
+
+def resnet_v1_50_fpn(channel_means, channel_stds, bgr_ordering):
+ """The ResNet v1 50 FPN feature extractor."""
+
+ return CenterNetResnetV1FpnFeatureExtractor(
+ resnet_type='resnet_v1_50',
+ channel_means=channel_means,
+ channel_stds=channel_stds,
+ bgr_ordering=bgr_ordering)
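
A short usage sketch, mirroring the test that follows:

import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models import center_net_resnet_v1_fpn_feature_extractor

extractor = center_net_resnet_v1_fpn_feature_extractor.resnet_v1_50_fpn(
    channel_means=(0., 0., 0.), channel_stds=(1., 1., 1.), bgr_ordering=False)

images = tf.constant(np.zeros((8, 224, 224, 3), dtype=np.float32))
processed = extractor.preprocess(images)  # maps [0, 255] pixels to [-1, 1]
features = extractor(processed)           # a list with a single stride-4 map
# features[0] has shape (8, 56, 56, 64): 64 channels at 1/4 input resolution.
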
diff --git a/research/object_detection/models/center_net_resnet_v1_fpn_feature_extractor_tf2_test.py b/research/object_detection/models/center_net_resnet_v1_fpn_feature_extractor_tf2_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..3f1524904f0a055e48342d09febdd7bd3ec6fb3c
--- /dev/null
+++ b/research/object_detection/models/center_net_resnet_v1_fpn_feature_extractor_tf2_test.py
@@ -0,0 +1,49 @@
+# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Testing ResNet v1 FPN models for the CenterNet meta architecture."""
+import unittest
+from absl.testing import parameterized
+
+import numpy as np
+import tensorflow.compat.v1 as tf
+
+from object_detection.models import center_net_resnet_v1_fpn_feature_extractor
+from object_detection.utils import test_case
+from object_detection.utils import tf_version
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class CenterNetResnetV1FpnFeatureExtractorTest(test_case.TestCase,
+ parameterized.TestCase):
+
+ @parameterized.parameters(
+ {'resnet_type': 'resnet_v1_50'},
+ {'resnet_type': 'resnet_v1_101'},
+ )
+ def test_correct_output_size(self, resnet_type):
+ """Verify that shape of features returned by the backbone is correct."""
+
+ model = center_net_resnet_v1_fpn_feature_extractor.\
+ CenterNetResnetV1FpnFeatureExtractor(resnet_type)
+ def graph_fn():
+ img = np.zeros((8, 224, 224, 3), dtype=np.float32)
+ processed_img = model.preprocess(img)
+ return model(processed_img)
+
+ self.assertEqual(self.execute(graph_fn, []).shape, (8, 56, 56, 64))
+
+
+if __name__ == '__main__':
+ tf.test.main()
diff --git a/research/object_detection/models/embedded_ssd_mobilenet_v1_feature_extractor_test.py b/research/object_detection/models/embedded_ssd_mobilenet_v1_feature_extractor_tf1_test.py
similarity index 97%
rename from research/object_detection/models/embedded_ssd_mobilenet_v1_feature_extractor_test.py
rename to research/object_detection/models/embedded_ssd_mobilenet_v1_feature_extractor_tf1_test.py
index fd7e0454419abdaa188607d007fd52b47c4a079c..4a27e8c8d649c4cb9ae961bffafc7ad824b63b25 100644
--- a/research/object_detection/models/embedded_ssd_mobilenet_v1_feature_extractor_test.py
+++ b/research/object_detection/models/embedded_ssd_mobilenet_v1_feature_extractor_tf1_test.py
@@ -14,13 +14,16 @@
# ==============================================================================
"""Tests for embedded_ssd_mobilenet_v1_feature_extractor."""
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models import embedded_ssd_mobilenet_v1_feature_extractor
from object_detection.models import ssd_feature_extractor_test
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class EmbeddedSSDMobileNetV1FeatureExtractorTest(
ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
diff --git a/research/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor_test.py b/research/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor_tf1_test.py
similarity index 97%
rename from research/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor_test.py
rename to research/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor_tf1_test.py
index 8b5351c8f5bf0e236f97aa0466aeedce7d4976df..2505fbfb3ad6e8621a3b2d05caba506b350f0f49 100644
--- a/research/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor_test.py
+++ b/research/object_detection/models/faster_rcnn_inception_resnet_v2_feature_extractor_tf1_test.py
@@ -14,12 +14,14 @@
# ==============================================================================
"""Tests for models.faster_rcnn_inception_resnet_v2_feature_extractor."""
-
+import unittest
import tensorflow.compat.v1 as tf
from object_detection.models import faster_rcnn_inception_resnet_v2_feature_extractor as frcnn_inc_res
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class FasterRcnnInceptionResnetV2FeatureExtractorTest(tf.test.TestCase):
def _build_feature_extractor(self, first_stage_features_stride):
diff --git a/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor.py b/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor.py
index 9196871bd3acbdf5d2b8379b56e1a8778daf3065..f185aa01dd377c66b94ca37cc244350b2071f21c 100644
--- a/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor.py
+++ b/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor.py
@@ -59,6 +59,7 @@ class FasterRCNNInceptionResnetV2KerasFeatureExtractor(
is_training, first_stage_features_stride, batch_norm_trainable,
weight_decay)
self._variable_dict = {}
+ self.classification_backbone = None
def preprocess(self, resized_inputs):
"""Faster R-CNN with Inception Resnet v2 preprocessing.
@@ -95,19 +96,20 @@ class FasterRCNNInceptionResnetV2KerasFeatureExtractor(
And returns rpn_feature_map:
A tensor with shape [batch, height, width, depth]
"""
- with tf.name_scope(name):
- with tf.name_scope('InceptionResnetV2'):
- model = inception_resnet_v2.inception_resnet_v2(
+ if not self.classification_backbone:
+ self.classification_backbone = inception_resnet_v2.inception_resnet_v2(
self._train_batch_norm,
output_stride=self._first_stage_features_stride,
align_feature_maps=True,
weight_decay=self._weight_decay,
weights=None,
include_top=False)
- proposal_features = model.get_layer(
+ with tf.name_scope(name):
+ with tf.name_scope('InceptionResnetV2'):
+ proposal_features = self.classification_backbone.get_layer(
name='block17_20_ac').output
keras_model = tf.keras.Model(
- inputs=model.inputs,
+ inputs=self.classification_backbone.inputs,
outputs=proposal_features)
for variable in keras_model.variables:
self._variable_dict[variable.name[:-2]] = variable
@@ -132,962 +134,26 @@ class FasterRCNNInceptionResnetV2KerasFeatureExtractor(
[batch_size * self.max_num_proposals, height, width, depth]
representing box classifier features for each proposal.
"""
+ if not self.classification_backbone:
+ self.classification_backbone = inception_resnet_v2.inception_resnet_v2(
+ self._train_batch_norm,
+ output_stride=self._first_stage_features_stride,
+ align_feature_maps=True,
+ weight_decay=self._weight_decay,
+ weights=None,
+ include_top=False)
with tf.name_scope(name):
with tf.name_scope('InceptionResnetV2'):
- model = inception_resnet_v2.inception_resnet_v2(
- self._train_batch_norm,
- output_stride=16,
- align_feature_maps=False,
- weight_decay=self._weight_decay,
- weights=None,
- include_top=False)
-
- proposal_feature_maps = model.get_layer(
+ proposal_feature_maps = self.classification_backbone.get_layer(
name='block17_20_ac').output
- proposal_classifier_features = model.get_layer(
+ proposal_classifier_features = self.classification_backbone.get_layer(
name='conv_7b_ac').output
keras_model = model_util.extract_submodel(
- model=model,
+ model=self.classification_backbone,
inputs=proposal_feature_maps,
outputs=proposal_classifier_features)
for variable in keras_model.variables:
self._variable_dict[variable.name[:-2]] = variable
return keras_model
- def restore_from_classification_checkpoint_fn(
- self,
- first_stage_feature_extractor_scope,
- second_stage_feature_extractor_scope):
- """Returns a map of variables to load from a foreign checkpoint.
-
- This uses a hard-coded conversion to load into Keras from a slim-trained
- inception_resnet_v2 checkpoint.
- Note that this overrides the default implementation in
- faster_rcnn_meta_arch.FasterRCNNKerasFeatureExtractor which does not work
- for InceptionResnetV2 checkpoints.
-
- Args:
- first_stage_feature_extractor_scope: A scope name for the first stage
- feature extractor.
- second_stage_feature_extractor_scope: A scope name for the second stage
- feature extractor.
-
- Returns:
- A dict mapping variable names (to load from a checkpoint) to variables in
- the model graph.
- """
-
- keras_to_slim_name_mapping = {
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d/kernel': 'InceptionResnetV2/Conv2d_1a_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm/beta': 'InceptionResnetV2/Conv2d_1a_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm/moving_mean': 'InceptionResnetV2/Conv2d_1a_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm/moving_variance': 'InceptionResnetV2/Conv2d_1a_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_1/kernel': 'InceptionResnetV2/Conv2d_2a_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_1/beta': 'InceptionResnetV2/Conv2d_2a_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_1/moving_mean': 'InceptionResnetV2/Conv2d_2a_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_1/moving_variance': 'InceptionResnetV2/Conv2d_2a_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_2/kernel': 'InceptionResnetV2/Conv2d_2b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_2/beta': 'InceptionResnetV2/Conv2d_2b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_2/moving_mean': 'InceptionResnetV2/Conv2d_2b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_2/moving_variance': 'InceptionResnetV2/Conv2d_2b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_3/kernel': 'InceptionResnetV2/Conv2d_3b_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_3/beta': 'InceptionResnetV2/Conv2d_3b_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_3/moving_mean': 'InceptionResnetV2/Conv2d_3b_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_3/moving_variance': 'InceptionResnetV2/Conv2d_3b_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_4/kernel': 'InceptionResnetV2/Conv2d_4a_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_4/beta': 'InceptionResnetV2/Conv2d_4a_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_4/moving_mean': 'InceptionResnetV2/Conv2d_4a_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_4/moving_variance': 'InceptionResnetV2/Conv2d_4a_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_5/kernel': 'InceptionResnetV2/Mixed_5b/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_5/beta': 'InceptionResnetV2/Mixed_5b/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_5/moving_mean': 'InceptionResnetV2/Mixed_5b/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_5/moving_variance': 'InceptionResnetV2/Mixed_5b/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_6/kernel': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_6/beta': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_6/moving_mean': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_6/moving_variance': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_7/kernel': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0b_5x5/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_7/beta': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0b_5x5/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_7/moving_mean': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0b_5x5/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_7/moving_variance': 'InceptionResnetV2/Mixed_5b/Branch_1/Conv2d_0b_5x5/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_8/kernel': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_8/beta': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_8/moving_mean': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_8/moving_variance': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_9/kernel': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_9/beta': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_9/moving_mean': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_9/moving_variance': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_10/kernel': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0c_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_10/beta': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0c_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_10/moving_mean': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_10/moving_variance': 'InceptionResnetV2/Mixed_5b/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_11/kernel': 'InceptionResnetV2/Mixed_5b/Branch_3/Conv2d_0b_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_11/beta': 'InceptionResnetV2/Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_11/moving_mean': 'InceptionResnetV2/Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_11/moving_variance': 'InceptionResnetV2/Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_12/kernel': 'InceptionResnetV2/Repeat/block35_1/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_12/beta': 'InceptionResnetV2/Repeat/block35_1/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_12/moving_mean': 'InceptionResnetV2/Repeat/block35_1/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_12/moving_variance': 'InceptionResnetV2/Repeat/block35_1/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_13/kernel': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_13/beta': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_13/moving_mean': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_13/moving_variance': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_14/kernel': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_14/beta': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_14/moving_mean': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_14/moving_variance': 'InceptionResnetV2/Repeat/block35_1/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_15/kernel': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_15/beta': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_15/moving_mean': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_15/moving_variance': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_16/kernel': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_16/beta': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_16/moving_mean': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_16/moving_variance': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_17/kernel': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0c_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_17/beta': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0c_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_17/moving_mean': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_17/moving_variance': 'InceptionResnetV2/Repeat/block35_1/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_1_conv/kernel': 'InceptionResnetV2/Repeat/block35_1/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_1_conv/bias': 'InceptionResnetV2/Repeat/block35_1/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_18/kernel': 'InceptionResnetV2/Repeat/block35_2/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_18/beta': 'InceptionResnetV2/Repeat/block35_2/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_18/moving_mean': 'InceptionResnetV2/Repeat/block35_2/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_18/moving_variance': 'InceptionResnetV2/Repeat/block35_2/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_19/kernel': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_19/beta': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_19/moving_mean': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_19/moving_variance': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_20/kernel': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_20/beta': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_20/moving_mean': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_20/moving_variance': 'InceptionResnetV2/Repeat/block35_2/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_21/kernel': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_21/beta': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_21/moving_mean': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_21/moving_variance': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_22/kernel': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_22/beta': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_22/moving_mean': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_22/moving_variance': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_23/kernel': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0c_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_23/beta': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0c_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_23/moving_mean': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_23/moving_variance': 'InceptionResnetV2/Repeat/block35_2/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_2_conv/kernel': 'InceptionResnetV2/Repeat/block35_2/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_2_conv/bias': 'InceptionResnetV2/Repeat/block35_2/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_24/kernel': 'InceptionResnetV2/Repeat/block35_3/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_24/beta': 'InceptionResnetV2/Repeat/block35_3/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_24/moving_mean': 'InceptionResnetV2/Repeat/block35_3/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_24/moving_variance': 'InceptionResnetV2/Repeat/block35_3/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_25/kernel': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_25/beta': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_25/moving_mean': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_25/moving_variance': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_26/kernel': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_26/beta': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_26/moving_mean': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_26/moving_variance': 'InceptionResnetV2/Repeat/block35_3/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_27/kernel': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_27/beta': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_27/moving_mean': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_27/moving_variance': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_28/kernel': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_28/beta': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_28/moving_mean': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_28/moving_variance': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_29/kernel': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0c_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_29/beta': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0c_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_29/moving_mean': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_29/moving_variance': 'InceptionResnetV2/Repeat/block35_3/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_3_conv/kernel': 'InceptionResnetV2/Repeat/block35_3/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_3_conv/bias': 'InceptionResnetV2/Repeat/block35_3/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_30/kernel': 'InceptionResnetV2/Repeat/block35_4/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_30/beta': 'InceptionResnetV2/Repeat/block35_4/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_30/moving_mean': 'InceptionResnetV2/Repeat/block35_4/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_30/moving_variance': 'InceptionResnetV2/Repeat/block35_4/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_31/kernel': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_31/beta': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_31/moving_mean': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_31/moving_variance': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_32/kernel': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_32/beta': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_32/moving_mean': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_32/moving_variance': 'InceptionResnetV2/Repeat/block35_4/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_33/kernel': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_33/beta': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_33/moving_mean': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_33/moving_variance': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_34/kernel': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_34/beta': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_34/moving_mean': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_34/moving_variance': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_35/kernel': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0c_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_35/beta': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0c_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_35/moving_mean': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_35/moving_variance': 'InceptionResnetV2/Repeat/block35_4/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_4_conv/kernel': 'InceptionResnetV2/Repeat/block35_4/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_4_conv/bias': 'InceptionResnetV2/Repeat/block35_4/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_36/kernel': 'InceptionResnetV2/Repeat/block35_5/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_36/beta': 'InceptionResnetV2/Repeat/block35_5/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_36/moving_mean': 'InceptionResnetV2/Repeat/block35_5/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_36/moving_variance': 'InceptionResnetV2/Repeat/block35_5/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_37/kernel': 'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_37/beta': 'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_37/moving_mean': 'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_37/moving_variance': 'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_38/kernel': 'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_38/beta': 'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_38/moving_mean': 'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_38/moving_variance': 'InceptionResnetV2/Repeat/block35_5/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_39/kernel': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_39/beta': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_39/moving_mean': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_39/moving_variance': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_40/kernel': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_40/beta': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_40/moving_mean': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_40/moving_variance': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_41/kernel': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0c_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_41/beta': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0c_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_41/moving_mean': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_41/moving_variance': 'InceptionResnetV2/Repeat/block35_5/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_5_conv/kernel': 'InceptionResnetV2/Repeat/block35_5/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_5_conv/bias': 'InceptionResnetV2/Repeat/block35_5/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_42/kernel': 'InceptionResnetV2/Repeat/block35_6/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_42/beta': 'InceptionResnetV2/Repeat/block35_6/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_42/moving_mean': 'InceptionResnetV2/Repeat/block35_6/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_42/moving_variance': 'InceptionResnetV2/Repeat/block35_6/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_43/kernel': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_43/beta': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_43/moving_mean': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_43/moving_variance': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_44/kernel': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_44/beta': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_44/moving_mean': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_44/moving_variance': 'InceptionResnetV2/Repeat/block35_6/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_45/kernel': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_45/beta': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_45/moving_mean': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_45/moving_variance': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_46/kernel': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_46/beta': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_46/moving_mean': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_46/moving_variance': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_47/kernel': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0c_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_47/beta': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0c_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_47/moving_mean': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_47/moving_variance': 'InceptionResnetV2/Repeat/block35_6/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_6_conv/kernel': 'InceptionResnetV2/Repeat/block35_6/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_6_conv/bias': 'InceptionResnetV2/Repeat/block35_6/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_48/kernel': 'InceptionResnetV2/Repeat/block35_7/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_48/beta': 'InceptionResnetV2/Repeat/block35_7/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_48/moving_mean': 'InceptionResnetV2/Repeat/block35_7/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_48/moving_variance': 'InceptionResnetV2/Repeat/block35_7/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_49/kernel': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_49/beta': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_49/moving_mean': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_49/moving_variance': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_50/kernel': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_50/beta': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_50/moving_mean': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_50/moving_variance': 'InceptionResnetV2/Repeat/block35_7/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_51/kernel': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_51/beta': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_51/moving_mean': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_51/moving_variance': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_52/kernel': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_52/beta': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_52/moving_mean': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_52/moving_variance': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_53/kernel': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0c_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_53/beta': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0c_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_53/moving_mean': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_53/moving_variance': 'InceptionResnetV2/Repeat/block35_7/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_7_conv/kernel': 'InceptionResnetV2/Repeat/block35_7/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_7_conv/bias': 'InceptionResnetV2/Repeat/block35_7/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_54/kernel': 'InceptionResnetV2/Repeat/block35_8/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_54/beta': 'InceptionResnetV2/Repeat/block35_8/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_54/moving_mean': 'InceptionResnetV2/Repeat/block35_8/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_54/moving_variance': 'InceptionResnetV2/Repeat/block35_8/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_55/kernel': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_55/beta': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_55/moving_mean': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_55/moving_variance': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_56/kernel': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_56/beta': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_56/moving_mean': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_56/moving_variance': 'InceptionResnetV2/Repeat/block35_8/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_57/kernel': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_57/beta': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_57/moving_mean': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_57/moving_variance': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_58/kernel': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_58/beta': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_58/moving_mean': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_58/moving_variance': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_59/kernel': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0c_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_59/beta': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0c_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_59/moving_mean': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_59/moving_variance': 'InceptionResnetV2/Repeat/block35_8/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_8_conv/kernel': 'InceptionResnetV2/Repeat/block35_8/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_8_conv/bias': 'InceptionResnetV2/Repeat/block35_8/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_60/kernel': 'InceptionResnetV2/Repeat/block35_9/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_60/beta': 'InceptionResnetV2/Repeat/block35_9/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_60/moving_mean': 'InceptionResnetV2/Repeat/block35_9/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_60/moving_variance': 'InceptionResnetV2/Repeat/block35_9/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_61/kernel': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_61/beta': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_61/moving_mean': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_61/moving_variance': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_62/kernel': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_62/beta': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_62/moving_mean': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_62/moving_variance': 'InceptionResnetV2/Repeat/block35_9/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_63/kernel': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_63/beta': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_63/moving_mean': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_63/moving_variance': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_64/kernel': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_64/beta': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_64/moving_mean': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_64/moving_variance': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_65/kernel': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0c_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_65/beta': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0c_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_65/moving_mean': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_65/moving_variance': 'InceptionResnetV2/Repeat/block35_9/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_9_conv/kernel': 'InceptionResnetV2/Repeat/block35_9/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_9_conv/bias': 'InceptionResnetV2/Repeat/block35_9/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_66/kernel': 'InceptionResnetV2/Repeat/block35_10/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_66/beta': 'InceptionResnetV2/Repeat/block35_10/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_66/moving_mean': 'InceptionResnetV2/Repeat/block35_10/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_66/moving_variance': 'InceptionResnetV2/Repeat/block35_10/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_67/kernel': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_67/beta': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_67/moving_mean': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_67/moving_variance': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_68/kernel': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_68/beta': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_68/moving_mean': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_68/moving_variance': 'InceptionResnetV2/Repeat/block35_10/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_69/kernel': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_69/beta': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_69/moving_mean': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_69/moving_variance': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_70/kernel': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_70/beta': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_70/moving_mean': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_70/moving_variance': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_71/kernel': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0c_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_71/beta': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0c_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_71/moving_mean': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_71/moving_variance': 'InceptionResnetV2/Repeat/block35_10/Branch_2/Conv2d_0c_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_10_conv/kernel': 'InceptionResnetV2/Repeat/block35_10/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block35_10_conv/bias': 'InceptionResnetV2/Repeat/block35_10/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_72/kernel': 'InceptionResnetV2/Mixed_6a/Branch_0/Conv2d_1a_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_72/beta': 'InceptionResnetV2/Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_72/moving_mean': 'InceptionResnetV2/Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_72/moving_variance': 'InceptionResnetV2/Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_73/kernel': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_73/beta': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_73/moving_mean': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_73/moving_variance': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_74/kernel': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0b_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_74/beta': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_74/moving_mean': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_74/moving_variance': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_75/kernel': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_1a_3x3/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_75/beta': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_75/moving_mean': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_75/moving_variance': 'InceptionResnetV2/Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_76/kernel': 'InceptionResnetV2/Repeat_1/block17_1/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_76/beta': 'InceptionResnetV2/Repeat_1/block17_1/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_76/moving_mean': 'InceptionResnetV2/Repeat_1/block17_1/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_76/moving_variance': 'InceptionResnetV2/Repeat_1/block17_1/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_77/kernel': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_77/beta': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_77/moving_mean': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_77/moving_variance': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_78/kernel': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_78/beta': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_78/moving_mean': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_78/moving_variance': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_79/kernel': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_79/beta': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_79/moving_mean': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_79/moving_variance': 'InceptionResnetV2/Repeat_1/block17_1/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_1_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_1/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_1_conv/bias': 'InceptionResnetV2/Repeat_1/block17_1/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_80/kernel': 'InceptionResnetV2/Repeat_1/block17_2/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_80/beta': 'InceptionResnetV2/Repeat_1/block17_2/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_80/moving_mean': 'InceptionResnetV2/Repeat_1/block17_2/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_80/moving_variance': 'InceptionResnetV2/Repeat_1/block17_2/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_81/kernel': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_81/beta': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_81/moving_mean': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_81/moving_variance': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_82/kernel': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_82/beta': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_82/moving_mean': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_82/moving_variance': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_83/kernel': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_83/beta': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_83/moving_mean': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_83/moving_variance': 'InceptionResnetV2/Repeat_1/block17_2/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_2_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_2/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_2_conv/bias': 'InceptionResnetV2/Repeat_1/block17_2/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_84/kernel': 'InceptionResnetV2/Repeat_1/block17_3/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_84/beta': 'InceptionResnetV2/Repeat_1/block17_3/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_84/moving_mean': 'InceptionResnetV2/Repeat_1/block17_3/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_84/moving_variance': 'InceptionResnetV2/Repeat_1/block17_3/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_85/kernel': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_85/beta': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_85/moving_mean': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_85/moving_variance': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_86/kernel': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_86/beta': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_86/moving_mean': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_86/moving_variance': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_87/kernel': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_87/beta': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_87/moving_mean': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_87/moving_variance': 'InceptionResnetV2/Repeat_1/block17_3/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_3_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_3/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_3_conv/bias': 'InceptionResnetV2/Repeat_1/block17_3/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_88/kernel': 'InceptionResnetV2/Repeat_1/block17_4/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_88/beta': 'InceptionResnetV2/Repeat_1/block17_4/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_88/moving_mean': 'InceptionResnetV2/Repeat_1/block17_4/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_88/moving_variance': 'InceptionResnetV2/Repeat_1/block17_4/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_89/kernel': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_89/beta': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_89/moving_mean': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_89/moving_variance': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_90/kernel': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_90/beta': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_90/moving_mean': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_90/moving_variance': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_91/kernel': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_91/beta': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_91/moving_mean': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_91/moving_variance': 'InceptionResnetV2/Repeat_1/block17_4/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_4_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_4/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_4_conv/bias': 'InceptionResnetV2/Repeat_1/block17_4/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_92/kernel': 'InceptionResnetV2/Repeat_1/block17_5/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_92/beta': 'InceptionResnetV2/Repeat_1/block17_5/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_92/moving_mean': 'InceptionResnetV2/Repeat_1/block17_5/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_92/moving_variance': 'InceptionResnetV2/Repeat_1/block17_5/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_93/kernel': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_93/beta': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_93/moving_mean': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_93/moving_variance': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_94/kernel': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_94/beta': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_94/moving_mean': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_94/moving_variance': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_95/kernel': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_95/beta': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_95/moving_mean': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_95/moving_variance': 'InceptionResnetV2/Repeat_1/block17_5/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_5_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_5/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_5_conv/bias': 'InceptionResnetV2/Repeat_1/block17_5/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_96/kernel': 'InceptionResnetV2/Repeat_1/block17_6/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_96/beta': 'InceptionResnetV2/Repeat_1/block17_6/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_96/moving_mean': 'InceptionResnetV2/Repeat_1/block17_6/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_96/moving_variance': 'InceptionResnetV2/Repeat_1/block17_6/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_97/kernel': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_97/beta': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_97/moving_mean': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_97/moving_variance': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_98/kernel': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_98/beta': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_98/moving_mean': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_98/moving_variance': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_99/kernel': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_99/beta': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_99/moving_mean': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_99/moving_variance': 'InceptionResnetV2/Repeat_1/block17_6/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_6_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_6/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_6_conv/bias': 'InceptionResnetV2/Repeat_1/block17_6/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_100/kernel': 'InceptionResnetV2/Repeat_1/block17_7/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_100/beta': 'InceptionResnetV2/Repeat_1/block17_7/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_100/moving_mean': 'InceptionResnetV2/Repeat_1/block17_7/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_100/moving_variance': 'InceptionResnetV2/Repeat_1/block17_7/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_101/kernel': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_101/beta': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_101/moving_mean': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_101/moving_variance': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_102/kernel': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_102/beta': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_102/moving_mean': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_102/moving_variance': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_103/kernel': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_103/beta': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_103/moving_mean': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_103/moving_variance': 'InceptionResnetV2/Repeat_1/block17_7/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_7_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_7/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_7_conv/bias': 'InceptionResnetV2/Repeat_1/block17_7/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_104/kernel': 'InceptionResnetV2/Repeat_1/block17_8/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_104/beta': 'InceptionResnetV2/Repeat_1/block17_8/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_104/moving_mean': 'InceptionResnetV2/Repeat_1/block17_8/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_104/moving_variance': 'InceptionResnetV2/Repeat_1/block17_8/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_105/kernel': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_105/beta': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_105/moving_mean': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_105/moving_variance': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_106/kernel': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_106/beta': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_106/moving_mean': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_106/moving_variance': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_107/kernel': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_107/beta': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_107/moving_mean': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_107/moving_variance': 'InceptionResnetV2/Repeat_1/block17_8/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_8_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_8/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_8_conv/bias': 'InceptionResnetV2/Repeat_1/block17_8/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_108/kernel': 'InceptionResnetV2/Repeat_1/block17_9/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_108/beta': 'InceptionResnetV2/Repeat_1/block17_9/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_108/moving_mean': 'InceptionResnetV2/Repeat_1/block17_9/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_108/moving_variance': 'InceptionResnetV2/Repeat_1/block17_9/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_109/kernel': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_109/beta': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_109/moving_mean': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_109/moving_variance': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_110/kernel': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_110/beta': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_110/moving_mean': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_110/moving_variance': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_111/kernel': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_111/beta': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_111/moving_mean': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_111/moving_variance': 'InceptionResnetV2/Repeat_1/block17_9/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_9_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_9/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_9_conv/bias': 'InceptionResnetV2/Repeat_1/block17_9/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_112/kernel': 'InceptionResnetV2/Repeat_1/block17_10/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_112/beta': 'InceptionResnetV2/Repeat_1/block17_10/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_112/moving_mean': 'InceptionResnetV2/Repeat_1/block17_10/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_112/moving_variance': 'InceptionResnetV2/Repeat_1/block17_10/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_113/kernel': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_113/beta': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_113/moving_mean': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_113/moving_variance': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_114/kernel': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_114/beta': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_114/moving_mean': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_114/moving_variance': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_115/kernel': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_115/beta': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_115/moving_mean': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_115/moving_variance': 'InceptionResnetV2/Repeat_1/block17_10/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_10_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_10/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_10_conv/bias': 'InceptionResnetV2/Repeat_1/block17_10/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_116/kernel': 'InceptionResnetV2/Repeat_1/block17_11/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_116/beta': 'InceptionResnetV2/Repeat_1/block17_11/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_116/moving_mean': 'InceptionResnetV2/Repeat_1/block17_11/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_116/moving_variance': 'InceptionResnetV2/Repeat_1/block17_11/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_117/kernel': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_117/beta': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_117/moving_mean': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_117/moving_variance': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_118/kernel': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_118/beta': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_118/moving_mean': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_118/moving_variance': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_119/kernel': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_119/beta': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_119/moving_mean': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_119/moving_variance': 'InceptionResnetV2/Repeat_1/block17_11/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_11_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_11/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_11_conv/bias': 'InceptionResnetV2/Repeat_1/block17_11/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_120/kernel': 'InceptionResnetV2/Repeat_1/block17_12/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_120/beta': 'InceptionResnetV2/Repeat_1/block17_12/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_120/moving_mean': 'InceptionResnetV2/Repeat_1/block17_12/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_120/moving_variance': 'InceptionResnetV2/Repeat_1/block17_12/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_121/kernel': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_121/beta': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_121/moving_mean': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_121/moving_variance': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_122/kernel': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_122/beta': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_122/moving_mean': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_122/moving_variance': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_123/kernel': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_123/beta': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_123/moving_mean': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_123/moving_variance': 'InceptionResnetV2/Repeat_1/block17_12/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_12_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_12/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_12_conv/bias': 'InceptionResnetV2/Repeat_1/block17_12/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_124/kernel': 'InceptionResnetV2/Repeat_1/block17_13/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_124/beta': 'InceptionResnetV2/Repeat_1/block17_13/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_124/moving_mean': 'InceptionResnetV2/Repeat_1/block17_13/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_124/moving_variance': 'InceptionResnetV2/Repeat_1/block17_13/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_125/kernel': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_125/beta': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_125/moving_mean': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_125/moving_variance': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_126/kernel': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_126/beta': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_126/moving_mean': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_126/moving_variance': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_127/kernel': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_127/beta': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_127/moving_mean': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_127/moving_variance': 'InceptionResnetV2/Repeat_1/block17_13/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_13_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_13/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_13_conv/bias': 'InceptionResnetV2/Repeat_1/block17_13/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_128/kernel': 'InceptionResnetV2/Repeat_1/block17_14/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_128/beta': 'InceptionResnetV2/Repeat_1/block17_14/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_128/moving_mean': 'InceptionResnetV2/Repeat_1/block17_14/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_128/moving_variance': 'InceptionResnetV2/Repeat_1/block17_14/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_129/kernel': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_129/beta': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_129/moving_mean': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_129/moving_variance': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_130/kernel': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_130/beta': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_130/moving_mean': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_130/moving_variance': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_131/kernel': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_131/beta': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_131/moving_mean': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_131/moving_variance': 'InceptionResnetV2/Repeat_1/block17_14/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_14_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_14/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_14_conv/bias': 'InceptionResnetV2/Repeat_1/block17_14/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_132/kernel': 'InceptionResnetV2/Repeat_1/block17_15/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_132/beta': 'InceptionResnetV2/Repeat_1/block17_15/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_132/moving_mean': 'InceptionResnetV2/Repeat_1/block17_15/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_132/moving_variance': 'InceptionResnetV2/Repeat_1/block17_15/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_133/kernel': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_133/beta': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_133/moving_mean': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_133/moving_variance': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_134/kernel': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_134/beta': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_134/moving_mean': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_134/moving_variance': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_135/kernel': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_135/beta': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_135/moving_mean': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_135/moving_variance': 'InceptionResnetV2/Repeat_1/block17_15/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_15_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_15/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_15_conv/bias': 'InceptionResnetV2/Repeat_1/block17_15/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_136/kernel': 'InceptionResnetV2/Repeat_1/block17_16/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_136/beta': 'InceptionResnetV2/Repeat_1/block17_16/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_136/moving_mean': 'InceptionResnetV2/Repeat_1/block17_16/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_136/moving_variance': 'InceptionResnetV2/Repeat_1/block17_16/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_137/kernel': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_137/beta': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_137/moving_mean': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_137/moving_variance': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_138/kernel': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_138/beta': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_138/moving_mean': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_138/moving_variance': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_139/kernel': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_139/beta': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_139/moving_mean': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_139/moving_variance': 'InceptionResnetV2/Repeat_1/block17_16/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_16_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_16/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_16_conv/bias': 'InceptionResnetV2/Repeat_1/block17_16/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_140/kernel': 'InceptionResnetV2/Repeat_1/block17_17/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_140/beta': 'InceptionResnetV2/Repeat_1/block17_17/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_140/moving_mean': 'InceptionResnetV2/Repeat_1/block17_17/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_140/moving_variance': 'InceptionResnetV2/Repeat_1/block17_17/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_141/kernel': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_141/beta': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_141/moving_mean': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_141/moving_variance': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_142/kernel': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_142/beta': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_142/moving_mean': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_142/moving_variance': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_143/kernel': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_143/beta': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_143/moving_mean': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_143/moving_variance': 'InceptionResnetV2/Repeat_1/block17_17/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_17_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_17/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_17_conv/bias': 'InceptionResnetV2/Repeat_1/block17_17/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_144/kernel': 'InceptionResnetV2/Repeat_1/block17_18/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_144/beta': 'InceptionResnetV2/Repeat_1/block17_18/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_144/moving_mean': 'InceptionResnetV2/Repeat_1/block17_18/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_144/moving_variance': 'InceptionResnetV2/Repeat_1/block17_18/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_145/kernel': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_145/beta': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_145/moving_mean': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_145/moving_variance': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_146/kernel': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_146/beta': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_146/moving_mean': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_146/moving_variance': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_147/kernel': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_147/beta': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_147/moving_mean': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_147/moving_variance': 'InceptionResnetV2/Repeat_1/block17_18/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_18_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_18/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_18_conv/bias': 'InceptionResnetV2/Repeat_1/block17_18/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_148/kernel': 'InceptionResnetV2/Repeat_1/block17_19/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_148/beta': 'InceptionResnetV2/Repeat_1/block17_19/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_148/moving_mean': 'InceptionResnetV2/Repeat_1/block17_19/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_148/moving_variance': 'InceptionResnetV2/Repeat_1/block17_19/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_149/kernel': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_149/beta': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_149/moving_mean': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_149/moving_variance': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_150/kernel': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_150/beta': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_150/moving_mean': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_150/moving_variance': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_151/kernel': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_151/beta': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_151/moving_mean': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_151/moving_variance': 'InceptionResnetV2/Repeat_1/block17_19/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_19_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_19/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_19_conv/bias': 'InceptionResnetV2/Repeat_1/block17_19/Conv2d_1x1/biases',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_152/kernel': 'InceptionResnetV2/Repeat_1/block17_20/Branch_0/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_152/beta': 'InceptionResnetV2/Repeat_1/block17_20/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_152/moving_mean': 'InceptionResnetV2/Repeat_1/block17_20/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_152/moving_variance': 'InceptionResnetV2/Repeat_1/block17_20/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_153/kernel': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0a_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_153/beta': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_153/moving_mean': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_153/moving_variance': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_154/kernel': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0b_1x7/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_154/beta': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0b_1x7/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_154/moving_mean': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_154/moving_variance': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0b_1x7/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/conv2d_155/kernel': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0c_7x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_155/beta': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0c_7x1/BatchNorm/beta',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_155/moving_mean': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_mean',
- 'FirstStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_155/moving_variance': 'InceptionResnetV2/Repeat_1/block17_20/Branch_1/Conv2d_0c_7x1/BatchNorm/moving_variance',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_20_conv/kernel': 'InceptionResnetV2/Repeat_1/block17_20/Conv2d_1x1/weights',
- 'FirstStageFeatureExtractor/InceptionResnetV2/block17_20_conv/bias': 'InceptionResnetV2/Repeat_1/block17_20/Conv2d_1x1/biases',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_359/kernel': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_0a_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_359/beta': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_359/moving_mean': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_359/moving_variance': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_360/kernel': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_1a_3x3/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_360/beta': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_360/moving_mean': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_360/moving_variance': 'InceptionResnetV2/Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_361/kernel': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_0a_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_361/beta': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_361/moving_mean': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_361/moving_variance': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_362/kernel': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_1a_3x3/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_362/beta': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_362/moving_mean': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_362/moving_variance': 'InceptionResnetV2/Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_363/kernel': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0a_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_363/beta': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_363/moving_mean': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_363/moving_variance': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_364/kernel': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0b_3x3/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_364/beta': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_364/moving_mean': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_364/moving_variance': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_365/kernel': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_1a_3x3/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_365/beta': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_365/moving_mean': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_365/moving_variance': 'InceptionResnetV2/Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_366/kernel': 'InceptionResnetV2/Repeat_2/block8_1/Branch_0/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_366/beta': 'InceptionResnetV2/Repeat_2/block8_1/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_366/moving_mean': 'InceptionResnetV2/Repeat_2/block8_1/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_366/moving_variance': 'InceptionResnetV2/Repeat_2/block8_1/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_367/kernel': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0a_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_367/beta': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_367/moving_mean': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_367/moving_variance': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_368/kernel': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0b_1x3/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_368/beta': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0b_1x3/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_368/moving_mean': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_368/moving_variance': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_369/kernel': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0c_3x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_369/beta': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0c_3x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_369/moving_mean': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_369/moving_variance': 'InceptionResnetV2/Repeat_2/block8_1/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_1_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_1/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_1_conv/bias': 'InceptionResnetV2/Repeat_2/block8_1/Conv2d_1x1/biases',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_370/kernel': 'InceptionResnetV2/Repeat_2/block8_2/Branch_0/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_370/beta': 'InceptionResnetV2/Repeat_2/block8_2/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_370/moving_mean': 'InceptionResnetV2/Repeat_2/block8_2/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_370/moving_variance': 'InceptionResnetV2/Repeat_2/block8_2/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_371/kernel': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0a_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_371/beta': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_371/moving_mean': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_371/moving_variance': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_372/kernel': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0b_1x3/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_372/beta': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0b_1x3/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_372/moving_mean': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_372/moving_variance': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_373/kernel': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0c_3x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_373/beta': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0c_3x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_373/moving_mean': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_373/moving_variance': 'InceptionResnetV2/Repeat_2/block8_2/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_2_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_2/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_2_conv/bias': 'InceptionResnetV2/Repeat_2/block8_2/Conv2d_1x1/biases',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_374/kernel': 'InceptionResnetV2/Repeat_2/block8_3/Branch_0/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_374/beta': 'InceptionResnetV2/Repeat_2/block8_3/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_374/moving_mean': 'InceptionResnetV2/Repeat_2/block8_3/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_374/moving_variance': 'InceptionResnetV2/Repeat_2/block8_3/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_375/kernel': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0a_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_375/beta': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_375/moving_mean': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_375/moving_variance': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_376/kernel': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0b_1x3/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_376/beta': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0b_1x3/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_376/moving_mean': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_376/moving_variance': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_377/kernel': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0c_3x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_377/beta': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0c_3x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_377/moving_mean': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_377/moving_variance': 'InceptionResnetV2/Repeat_2/block8_3/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_3_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_3/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_3_conv/bias': 'InceptionResnetV2/Repeat_2/block8_3/Conv2d_1x1/biases',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_378/kernel': 'InceptionResnetV2/Repeat_2/block8_4/Branch_0/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_378/beta': 'InceptionResnetV2/Repeat_2/block8_4/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_378/moving_mean': 'InceptionResnetV2/Repeat_2/block8_4/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_378/moving_variance': 'InceptionResnetV2/Repeat_2/block8_4/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_379/kernel': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0a_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_379/beta': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_379/moving_mean': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_379/moving_variance': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_380/kernel': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0b_1x3/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_380/beta': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0b_1x3/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_380/moving_mean': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_380/moving_variance': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_381/kernel': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0c_3x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_381/beta': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0c_3x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_381/moving_mean': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_381/moving_variance': 'InceptionResnetV2/Repeat_2/block8_4/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_4_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_4/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_4_conv/bias': 'InceptionResnetV2/Repeat_2/block8_4/Conv2d_1x1/biases',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_382/kernel': 'InceptionResnetV2/Repeat_2/block8_5/Branch_0/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_382/beta': 'InceptionResnetV2/Repeat_2/block8_5/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_382/moving_mean': 'InceptionResnetV2/Repeat_2/block8_5/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_382/moving_variance': 'InceptionResnetV2/Repeat_2/block8_5/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_383/kernel': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0a_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_383/beta': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_383/moving_mean': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_383/moving_variance': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_384/kernel': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0b_1x3/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_384/beta': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0b_1x3/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_384/moving_mean': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_384/moving_variance': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_385/kernel': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0c_3x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_385/beta': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0c_3x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_385/moving_mean': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_385/moving_variance': 'InceptionResnetV2/Repeat_2/block8_5/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_5_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_5/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_5_conv/bias': 'InceptionResnetV2/Repeat_2/block8_5/Conv2d_1x1/biases',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_386/kernel': 'InceptionResnetV2/Repeat_2/block8_6/Branch_0/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_386/beta': 'InceptionResnetV2/Repeat_2/block8_6/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_386/moving_mean': 'InceptionResnetV2/Repeat_2/block8_6/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_386/moving_variance': 'InceptionResnetV2/Repeat_2/block8_6/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_387/kernel': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0a_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_387/beta': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_387/moving_mean': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_387/moving_variance': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_388/kernel': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0b_1x3/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_388/beta': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0b_1x3/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_388/moving_mean': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_388/moving_variance': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_389/kernel': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0c_3x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_389/beta': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0c_3x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_389/moving_mean': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_389/moving_variance': 'InceptionResnetV2/Repeat_2/block8_6/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_6_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_6/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_6_conv/bias': 'InceptionResnetV2/Repeat_2/block8_6/Conv2d_1x1/biases',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_390/kernel': 'InceptionResnetV2/Repeat_2/block8_7/Branch_0/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_390/beta': 'InceptionResnetV2/Repeat_2/block8_7/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_390/moving_mean': 'InceptionResnetV2/Repeat_2/block8_7/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_390/moving_variance': 'InceptionResnetV2/Repeat_2/block8_7/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_391/kernel': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0a_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_391/beta': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_391/moving_mean': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_391/moving_variance': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_392/kernel': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0b_1x3/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_392/beta': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0b_1x3/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_392/moving_mean': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_392/moving_variance': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_393/kernel': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0c_3x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_393/beta': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0c_3x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_393/moving_mean': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_393/moving_variance': 'InceptionResnetV2/Repeat_2/block8_7/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_7_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_7/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_7_conv/bias': 'InceptionResnetV2/Repeat_2/block8_7/Conv2d_1x1/biases',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_394/kernel': 'InceptionResnetV2/Repeat_2/block8_8/Branch_0/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_394/beta': 'InceptionResnetV2/Repeat_2/block8_8/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_394/moving_mean': 'InceptionResnetV2/Repeat_2/block8_8/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_394/moving_variance': 'InceptionResnetV2/Repeat_2/block8_8/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_395/kernel': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0a_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_395/beta': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_395/moving_mean': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_395/moving_variance': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_396/kernel': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0b_1x3/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_396/beta': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0b_1x3/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_396/moving_mean': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_396/moving_variance': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_397/kernel': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0c_3x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_397/beta': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0c_3x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_397/moving_mean': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_397/moving_variance': 'InceptionResnetV2/Repeat_2/block8_8/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_8_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_8/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_8_conv/bias': 'InceptionResnetV2/Repeat_2/block8_8/Conv2d_1x1/biases',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_398/kernel': 'InceptionResnetV2/Repeat_2/block8_9/Branch_0/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_398/beta': 'InceptionResnetV2/Repeat_2/block8_9/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_398/moving_mean': 'InceptionResnetV2/Repeat_2/block8_9/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_398/moving_variance': 'InceptionResnetV2/Repeat_2/block8_9/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_399/kernel': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0a_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_399/beta': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_399/moving_mean': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_399/moving_variance': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_400/kernel': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0b_1x3/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_400/beta': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0b_1x3/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_400/moving_mean': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_400/moving_variance': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_401/kernel': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0c_3x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_401/beta': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0c_3x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_401/moving_mean': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_401/moving_variance': 'InceptionResnetV2/Repeat_2/block8_9/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_9_conv/kernel': 'InceptionResnetV2/Repeat_2/block8_9/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_9_conv/bias': 'InceptionResnetV2/Repeat_2/block8_9/Conv2d_1x1/biases',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_402/kernel': 'InceptionResnetV2/Block8/Branch_0/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_402/beta': 'InceptionResnetV2/Block8/Branch_0/Conv2d_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_402/moving_mean': 'InceptionResnetV2/Block8/Branch_0/Conv2d_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_402/moving_variance': 'InceptionResnetV2/Block8/Branch_0/Conv2d_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_403/kernel': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0a_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_403/beta': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0a_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_403/moving_mean': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_403/moving_variance': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0a_1x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_404/kernel': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0b_1x3/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_404/beta': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0b_1x3/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_404/moving_mean': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_404/moving_variance': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0b_1x3/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv2d_405/kernel': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0c_3x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_405/beta': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0c_3x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_405/moving_mean': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/freezable_batch_norm_405/moving_variance': 'InceptionResnetV2/Block8/Branch_1/Conv2d_0c_3x1/BatchNorm/moving_variance',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_10_conv/kernel': 'InceptionResnetV2/Block8/Conv2d_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/block8_10_conv/bias': 'InceptionResnetV2/Block8/Conv2d_1x1/biases',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv_7b/kernel': 'InceptionResnetV2/Conv2d_7b_1x1/weights',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv_7b_bn/beta': 'InceptionResnetV2/Conv2d_7b_1x1/BatchNorm/beta',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv_7b_bn/moving_mean': 'InceptionResnetV2/Conv2d_7b_1x1/BatchNorm/moving_mean',
- 'SecondStageFeatureExtractor/InceptionResnetV2/conv_7b_bn/moving_variance': 'InceptionResnetV2/Conv2d_7b_1x1/BatchNorm/moving_variance',
- }
-
- variables_to_restore = {}
- if tf.executing_eagerly():
- for key in self._variable_dict:
- # variable.name includes ":0" at the end, but the names in the
- # checkpoint do not have the suffix ":0". So, we strip it here.
- var_name = keras_to_slim_name_mapping.get(key)
- if var_name:
- variables_to_restore[var_name] = self._variable_dict[key]
- else:
- for variable in variables_helper.get_global_variables_safely():
- var_name = keras_to_slim_name_mapping.get(variable.op.name)
- if var_name:
- variables_to_restore[var_name] = variable
- return variables_to_restore
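The removed mapping and restore logic above illustrate the TF1-era pattern for loading slim-named checkpoints into Keras variables: build a dict keyed by the checkpoint (slim) variable names whose values are the live Keras variables, then hand that dict to a TF1-style saver. A minimal sketch of the pattern, with hypothetical names, assuming a mapping dict like the one removed above:

import tensorflow.compat.v1 as tf

def build_restore_map(variable_dict, keras_to_slim_name_mapping):
  # variable_dict maps Keras variable names (without the trailing ':0')
  # to the corresponding tf.Variable objects, as in the removed code above.
  variables_to_restore = {}
  for keras_name, variable in variable_dict.items():
    slim_name = keras_to_slim_name_mapping.get(keras_name)
    if slim_name:
      variables_to_restore[slim_name] = variable
  return variables_to_restore

# Hypothetical usage with a TF1-style saver and a slim checkpoint:
# saver = tf.train.Saver(build_restore_map(extractor._variable_dict, mapping))
# saver.restore(sess, slim_checkpoint_path)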
diff --git a/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_test.py b/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_tf2_test.py
similarity index 67%
rename from research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_test.py
rename to research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_tf2_test.py
index c8227603ac8737a77a9c5857eb6023c97997757c..20bb50ef836aaf71448f9711f430b532d5a01b5b 100644
--- a/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_test.py
+++ b/research/object_detection/models/faster_rcnn_inception_resnet_v2_keras_feature_extractor_tf2_test.py
@@ -14,12 +14,14 @@
# ==============================================================================
"""Tests for models.faster_rcnn_inception_resnet_v2_keras_feature_extractor."""
-
+import unittest
import tensorflow.compat.v1 as tf
from object_detection.models import faster_rcnn_inception_resnet_v2_keras_feature_extractor as frcnn_inc_res
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class FasterRcnnInceptionResnetV2KerasFeatureExtractorTest(tf.test.TestCase):
def _build_feature_extractor(self, first_stage_features_stride):
@@ -38,11 +40,7 @@ class FasterRcnnInceptionResnetV2KerasFeatureExtractorTest(tf.test.TestCase):
name='TestScope')(preprocessed_inputs)
features_shape = tf.shape(rpn_feature_map)
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- features_shape_out = sess.run(features_shape)
- self.assertAllEqual(features_shape_out, [1, 19, 19, 1088])
+ self.assertAllEqual(features_shape.numpy(), [1, 19, 19, 1088])
def test_extract_proposal_features_stride_eight(self):
feature_extractor = self._build_feature_extractor(
@@ -53,11 +51,7 @@ class FasterRcnnInceptionResnetV2KerasFeatureExtractorTest(tf.test.TestCase):
name='TestScope')(preprocessed_inputs)
features_shape = tf.shape(rpn_feature_map)
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- features_shape_out = sess.run(features_shape)
- self.assertAllEqual(features_shape_out, [1, 28, 28, 1088])
+ self.assertAllEqual(features_shape.numpy(), [1, 28, 28, 1088])
def test_extract_proposal_features_half_size_input(self):
feature_extractor = self._build_feature_extractor(
@@ -67,25 +61,7 @@ class FasterRcnnInceptionResnetV2KerasFeatureExtractorTest(tf.test.TestCase):
rpn_feature_map = feature_extractor.get_proposal_feature_extractor_model(
name='TestScope')(preprocessed_inputs)
features_shape = tf.shape(rpn_feature_map)
-
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- features_shape_out = sess.run(features_shape)
- self.assertAllEqual(features_shape_out, [1, 7, 7, 1088])
-
- def test_extract_proposal_features_dies_on_invalid_stride(self):
- with self.assertRaises(ValueError):
- self._build_feature_extractor(first_stage_features_stride=99)
-
- def test_extract_proposal_features_dies_with_incorrect_rank_inputs(self):
- feature_extractor = self._build_feature_extractor(
- first_stage_features_stride=16)
- preprocessed_inputs = tf.random_uniform(
- [224, 224, 3], maxval=255, dtype=tf.float32)
- with self.assertRaises(ValueError):
- feature_extractor.get_proposal_feature_extractor_model(
- name='TestScope')(preprocessed_inputs)
+ self.assertAllEqual(features_shape.numpy(), [1, 7, 7, 1088])
def test_extract_box_classifier_features_returns_expected_size(self):
feature_extractor = self._build_feature_extractor(
@@ -97,12 +73,7 @@ class FasterRcnnInceptionResnetV2KerasFeatureExtractorTest(tf.test.TestCase):
proposal_classifier_features = (
model(proposal_feature_maps))
features_shape = tf.shape(proposal_classifier_features)
-
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- features_shape_out = sess.run(features_shape)
- self.assertAllEqual(features_shape_out, [2, 8, 8, 1536])
+ self.assertAllEqual(features_shape.numpy(), [2, 9, 9, 1536])
if __name__ == '__main__':
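The *_tf2_test.py renames above and below follow a single conversion recipe: gate the test module on the installed TensorFlow major version with unittest.skipIf and tf_version, and replace session-based shape checks with eager .numpy() assertions. A generic sketch of that recipe (a hypothetical test, not part of this diff):

import unittest
import tensorflow.compat.v1 as tf
from object_detection.utils import tf_version


@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class EagerShapeTest(tf.test.TestCase):

  def test_shape_is_read_eagerly(self):
    # With v2 behavior enabled in main(), tensors can be evaluated directly
    # instead of initializing variables and running a session.
    x = tf.random_uniform([1, 224, 224, 3], maxval=255, dtype=tf.float32)
    self.assertAllEqual(tf.shape(x).numpy(), [1, 224, 224, 3])


if __name__ == '__main__':
  tf.enable_v2_behavior()
  tf.test.main()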
diff --git a/research/object_detection/models/faster_rcnn_inception_v2_feature_extractor_test.py b/research/object_detection/models/faster_rcnn_inception_v2_feature_extractor_tf1_test.py
similarity index 97%
rename from research/object_detection/models/faster_rcnn_inception_v2_feature_extractor_test.py
rename to research/object_detection/models/faster_rcnn_inception_v2_feature_extractor_tf1_test.py
index 600c699c8fe196a30256590ddf68281b9846fc2d..f5d01145f291f7b795a917e5a96632d52b42bac5 100644
--- a/research/object_detection/models/faster_rcnn_inception_v2_feature_extractor_test.py
+++ b/research/object_detection/models/faster_rcnn_inception_v2_feature_extractor_tf1_test.py
@@ -14,13 +14,15 @@
# ==============================================================================
"""Tests for faster_rcnn_inception_v2_feature_extractor."""
-
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models import faster_rcnn_inception_v2_feature_extractor as faster_rcnn_inception_v2
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class FasterRcnnInceptionV2FeatureExtractorTest(tf.test.TestCase):
def _build_feature_extractor(self, first_stage_features_stride):
diff --git a/research/object_detection/models/faster_rcnn_mobilenet_v1_feature_extractor_test.py b/research/object_detection/models/faster_rcnn_mobilenet_v1_feature_extractor_tf1_test.py
similarity index 97%
rename from research/object_detection/models/faster_rcnn_mobilenet_v1_feature_extractor_test.py
rename to research/object_detection/models/faster_rcnn_mobilenet_v1_feature_extractor_tf1_test.py
index 39d6d234d1d7fb902ebdc92e457a2ceeca1bdd3f..65a4958e4c20964b2857f95f7bc2b83d05d3cc02 100644
--- a/research/object_detection/models/faster_rcnn_mobilenet_v1_feature_extractor_test.py
+++ b/research/object_detection/models/faster_rcnn_mobilenet_v1_feature_extractor_tf1_test.py
@@ -14,13 +14,15 @@
# ==============================================================================
"""Tests for faster_rcnn_mobilenet_v1_feature_extractor."""
-
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models import faster_rcnn_mobilenet_v1_feature_extractor as faster_rcnn_mobilenet_v1
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class FasterRcnnMobilenetV1FeatureExtractorTest(tf.test.TestCase):
def _build_feature_extractor(self, first_stage_features_stride):
diff --git a/research/object_detection/models/faster_rcnn_nas_feature_extractor.py b/research/object_detection/models/faster_rcnn_nas_feature_extractor.py
index b1f5e1e6ec533fc698bef7bc23c54a2788aec949..9fe17cbea856dd1ed8ca0bf1a8c25327714c5b6d 100644
--- a/research/object_detection/models/faster_rcnn_nas_feature_extractor.py
+++ b/research/object_detection/models/faster_rcnn_nas_feature_extractor.py
@@ -31,8 +31,14 @@ import tf_slim as slim
from object_detection.meta_architectures import faster_rcnn_meta_arch
from object_detection.utils import variables_helper
-from nets.nasnet import nasnet
-from nets.nasnet import nasnet_utils
+
+# pylint: disable=g-import-not-at-top
+try:
+ from nets.nasnet import nasnet
+ from nets.nasnet import nasnet_utils
+except: # pylint: disable=bare-except
+ pass
+# pylint: enable=g-import-not-at-top
arg_scope = slim.arg_scope
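The guarded import above keeps the NAS (and, below, PNAS) feature extractor modules importable in environments where slim's nets package is absent, such as TF2-only installs; a missing dependency then surfaces only when the extractor is actually built. A hedged sketch of the same idea, with a hypothetical helper (the real modules only wrap the import and use a bare except):

# pylint: disable=g-import-not-at-top
try:
  from nets.nasnet import nasnet  # available only when slim's `nets` is installed
except ImportError:
  nasnet = None
# pylint: enable=g-import-not-at-top


def _require_nasnet():
  # Hypothetical helper: fail lazily at build time rather than at import time.
  if nasnet is None:
    raise ImportError('nets.nasnet is required to build the NAS feature '
                      'extractor; install the slim `nets` package.')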
diff --git a/research/object_detection/models/faster_rcnn_nas_feature_extractor_test.py b/research/object_detection/models/faster_rcnn_nas_feature_extractor_tf1_test.py
similarity index 97%
rename from research/object_detection/models/faster_rcnn_nas_feature_extractor_test.py
rename to research/object_detection/models/faster_rcnn_nas_feature_extractor_tf1_test.py
index 4f7e5bed983fcac3718daa652daf8df35ee3d48d..a41cb0f733d613ffb050bbf4f8506579375c9d08 100644
--- a/research/object_detection/models/faster_rcnn_nas_feature_extractor_test.py
+++ b/research/object_detection/models/faster_rcnn_nas_feature_extractor_tf1_test.py
@@ -14,12 +14,14 @@
# ==============================================================================
"""Tests for models.faster_rcnn_nas_feature_extractor."""
-
+import unittest
import tensorflow.compat.v1 as tf
from object_detection.models import faster_rcnn_nas_feature_extractor as frcnn_nas
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class FasterRcnnNASFeatureExtractorTest(tf.test.TestCase):
def _build_feature_extractor(self, first_stage_features_stride):
diff --git a/research/object_detection/models/faster_rcnn_pnas_feature_extractor.py b/research/object_detection/models/faster_rcnn_pnas_feature_extractor.py
index 7f4ff7e8fee8b1df08cd76857ba77406a08bd903..ec32cd309d3a3fe135cf72665631b04273e21424 100644
--- a/research/object_detection/models/faster_rcnn_pnas_feature_extractor.py
+++ b/research/object_detection/models/faster_rcnn_pnas_feature_extractor.py
@@ -30,7 +30,11 @@ import tf_slim as slim
from object_detection.meta_architectures import faster_rcnn_meta_arch
from object_detection.utils import variables_helper
from nets.nasnet import nasnet_utils
-from nets.nasnet import pnasnet
+
+try:
+ from nets.nasnet import pnasnet # pylint: disable=g-import-not-at-top
+except: # pylint: disable=bare-except
+ pass
arg_scope = slim.arg_scope
diff --git a/research/object_detection/models/faster_rcnn_pnas_feature_extractor_test.py b/research/object_detection/models/faster_rcnn_pnas_feature_extractor_tf1_test.py
similarity index 97%
rename from research/object_detection/models/faster_rcnn_pnas_feature_extractor_test.py
rename to research/object_detection/models/faster_rcnn_pnas_feature_extractor_tf1_test.py
index 46b822fd25b838d67db6be5b495c3e591e69f08d..16774511b4d9c6eb1c94b8304640d9bf99c47ce0 100644
--- a/research/object_detection/models/faster_rcnn_pnas_feature_extractor_test.py
+++ b/research/object_detection/models/faster_rcnn_pnas_feature_extractor_tf1_test.py
@@ -14,12 +14,14 @@
# ==============================================================================
"""Tests for models.faster_rcnn_pnas_feature_extractor."""
-
+import unittest
import tensorflow.compat.v1 as tf
from object_detection.models import faster_rcnn_pnas_feature_extractor as frcnn_pnas
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class FasterRcnnPNASFeatureExtractorTest(tf.test.TestCase):
def _build_feature_extractor(self, first_stage_features_stride):
diff --git a/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor.py b/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6b1e25404c71be5a3b68df9ce85416ffd4e982e
--- /dev/null
+++ b/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor.py
@@ -0,0 +1,254 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+"""Resnet based Faster R-CNN implementation in Keras.
+
+See Deep Residual Learning for Image Recognition by He et al.
+https://arxiv.org/abs/1512.03385
+"""
+
+import tensorflow.compat.v1 as tf
+
+from object_detection.meta_architectures import faster_rcnn_meta_arch
+from object_detection.models.keras_models import resnet_v1
+from object_detection.utils import model_util
+
+
+_RESNET_MODEL_CONV4_LAST_LAYERS = {
+ 'resnet_v1_50': 'conv4_block6_out',
+ 'resnet_v1_101': 'conv4_block23_out',
+ 'resnet_v1_152': 'conv4_block36_out',
+}
+
+
+class FasterRCNNResnetKerasFeatureExtractor(
+ faster_rcnn_meta_arch.FasterRCNNKerasFeatureExtractor):
+ """Faster R-CNN with Resnet feature extractor implementation."""
+
+ def __init__(self,
+ is_training,
+ resnet_v1_base_model,
+ resnet_v1_base_model_name,
+ first_stage_features_stride=16,
+ batch_norm_trainable=False,
+ weight_decay=0.0):
+ """Constructor.
+
+ Args:
+ is_training: See base class.
+ resnet_v1_base_model: base resnet v1 network to use. One of
+ the resnet_v1.resnet_v1_{50,101,152} models.
+ resnet_v1_base_model_name: model name under which to construct resnet v1.
+ first_stage_features_stride: See base class.
+ batch_norm_trainable: See base class.
+ weight_decay: See base class.
+
+ Raises:
+      ValueError: If `first_stage_features_stride` is not 16.
+ """
+ if first_stage_features_stride != 16:
+ raise ValueError('`first_stage_features_stride` must be 16.')
+ super(FasterRCNNResnetKerasFeatureExtractor, self).__init__(
+ is_training, first_stage_features_stride, batch_norm_trainable,
+ weight_decay)
+ self.classification_backbone = None
+ self._variable_dict = {}
+ self._resnet_v1_base_model = resnet_v1_base_model
+ self._resnet_v1_base_model_name = resnet_v1_base_model_name
+
+ def preprocess(self, resized_inputs):
+ """Faster R-CNN Resnet V1 preprocessing.
+
+ VGG style channel mean subtraction as described here:
+ https://gist.github.com/ksimonyan/211839e770f7b538e2d8#file-readme-md
+ Note that if the number of channels is not equal to 3, the mean subtraction
+ will be skipped and the original resized_inputs will be returned.
+
+ Args:
+ resized_inputs: A [batch, height_in, width_in, channels] float32 tensor
+ representing a batch of images with values between 0 and 255.0.
+
+ Returns:
+ preprocessed_inputs: A [batch, height_out, width_out, channels] float32
+ tensor representing a batch of images.
+
+ """
+ if resized_inputs.shape.as_list()[3] == 3:
+ channel_means = [123.68, 116.779, 103.939]
+ return resized_inputs - [[channel_means]]
+ else:
+ return resized_inputs
+
+ def get_proposal_feature_extractor_model(self, name=None):
+ """Returns a model that extracts first stage RPN features.
+
+ Extracts features using the first half of the Resnet v1 network.
+
+ Args:
+ name: A scope name to construct all variables within.
+
+ Returns:
+ A Keras model that takes preprocessed_inputs:
+ A [batch, height, width, channels] float32 tensor
+ representing a batch of images.
+
+ And returns rpn_feature_map:
+ A tensor with shape [batch, height, width, depth]
+ """
+ if not self.classification_backbone:
+ self.classification_backbone = self._resnet_v1_base_model(
+ batchnorm_training=self._train_batch_norm,
+ conv_hyperparams=None,
+ weight_decay=self._weight_decay,
+ classes=None,
+ weights=None,
+ include_top=False
+ )
+ with tf.name_scope(name):
+ with tf.name_scope('ResnetV1'):
+
+ conv4_last_layer = _RESNET_MODEL_CONV4_LAST_LAYERS[
+ self._resnet_v1_base_model_name]
+ proposal_features = self.classification_backbone.get_layer(
+ name=conv4_last_layer).output
+ keras_model = tf.keras.Model(
+ inputs=self.classification_backbone.inputs,
+ outputs=proposal_features)
+ for variable in keras_model.variables:
+ self._variable_dict[variable.name[:-2]] = variable
+ return keras_model
+
+ def get_box_classifier_feature_extractor_model(self, name=None):
+ """Returns a model that extracts second stage box classifier features.
+
+ This function reconstructs the "second half" of the ResNet v1
+ network after the part defined in `get_proposal_feature_extractor_model`.
+
+ Args:
+ name: A scope name to construct all variables within.
+
+ Returns:
+ A Keras model that takes proposal_feature_maps:
+ A 4-D float tensor with shape
+ [batch_size * self.max_num_proposals, crop_height, crop_width, depth]
+ representing the feature map cropped to each proposal.
+ And returns proposal_classifier_features:
+ A 4-D float tensor with shape
+ [batch_size * self.max_num_proposals, height, width, depth]
+ representing box classifier features for each proposal.
+ """
+ if not self.classification_backbone:
+ self.classification_backbone = self._resnet_v1_base_model(
+ batchnorm_training=self._train_batch_norm,
+ conv_hyperparams=None,
+ weight_decay=self._weight_decay,
+ classes=None,
+ weights=None,
+ include_top=False
+ )
+ with tf.name_scope(name):
+ with tf.name_scope('ResnetV1'):
+ conv4_last_layer = _RESNET_MODEL_CONV4_LAST_LAYERS[
+ self._resnet_v1_base_model_name]
+ proposal_feature_maps = self.classification_backbone.get_layer(
+ name=conv4_last_layer).output
+ proposal_classifier_features = self.classification_backbone.get_layer(
+ name='conv5_block3_out').output
+
+ keras_model = model_util.extract_submodel(
+ model=self.classification_backbone,
+ inputs=proposal_feature_maps,
+ outputs=proposal_classifier_features)
+ for variable in keras_model.variables:
+ self._variable_dict[variable.name[:-2]] = variable
+ return keras_model
+
+
+class FasterRCNNResnet50KerasFeatureExtractor(
+ FasterRCNNResnetKerasFeatureExtractor):
+ """Faster R-CNN with Resnet50 feature extractor implementation."""
+
+ def __init__(self,
+ is_training,
+ first_stage_features_stride=16,
+ batch_norm_trainable=False,
+ weight_decay=0.0):
+ """Constructor.
+
+ Args:
+ is_training: See base class.
+ first_stage_features_stride: See base class.
+ batch_norm_trainable: See base class.
+ weight_decay: See base class.
+ """
+ super(FasterRCNNResnet50KerasFeatureExtractor, self).__init__(
+ is_training=is_training,
+ resnet_v1_base_model=resnet_v1.resnet_v1_50,
+ resnet_v1_base_model_name='resnet_v1_50',
+ first_stage_features_stride=first_stage_features_stride,
+ batch_norm_trainable=batch_norm_trainable,
+ weight_decay=weight_decay)
+
+
+class FasterRCNNResnet101KerasFeatureExtractor(
+ FasterRCNNResnetKerasFeatureExtractor):
+ """Faster R-CNN with Resnet101 feature extractor implementation."""
+
+ def __init__(self,
+ is_training,
+ first_stage_features_stride=16,
+ batch_norm_trainable=False,
+ weight_decay=0.0):
+ """Constructor.
+
+ Args:
+ is_training: See base class.
+ first_stage_features_stride: See base class.
+ batch_norm_trainable: See base class.
+ weight_decay: See base class.
+ """
+ super(FasterRCNNResnet101KerasFeatureExtractor, self).__init__(
+ is_training=is_training,
+ resnet_v1_base_model=resnet_v1.resnet_v1_101,
+ resnet_v1_base_model_name='resnet_v1_101',
+ first_stage_features_stride=first_stage_features_stride,
+ batch_norm_trainable=batch_norm_trainable,
+ weight_decay=weight_decay)
+
+
+class FasterRCNNResnet152KerasFeatureExtractor(
+ FasterRCNNResnetKerasFeatureExtractor):
+ """Faster R-CNN with Resnet152 feature extractor implementation."""
+
+ def __init__(self,
+ is_training,
+ first_stage_features_stride=16,
+ batch_norm_trainable=False,
+ weight_decay=0.0):
+ """Constructor.
+
+ Args:
+ is_training: See base class.
+ first_stage_features_stride: See base class.
+ batch_norm_trainable: See base class.
+ weight_decay: See base class.
+ """
+ super(FasterRCNNResnet152KerasFeatureExtractor, self).__init__(
+ is_training=is_training,
+ resnet_v1_base_model=resnet_v1.resnet_v1_152,
+ resnet_v1_base_model_name='resnet_v1_152',
+ first_stage_features_stride=first_stage_features_stride,
+ batch_norm_trainable=batch_norm_trainable,
+ weight_decay=weight_decay)
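A minimal usage sketch for the new Keras ResNet extractor, mirroring the tf2 test that follows (assumes eager execution, e.g. after tf.enable_v2_behavior()):

import tensorflow.compat.v1 as tf
from object_detection.models import faster_rcnn_resnet_keras_feature_extractor as frcnn_res

extractor = frcnn_res.FasterRCNNResnet50KerasFeatureExtractor(
    is_training=False, first_stage_features_stride=16)
images = tf.random_uniform([1, 224, 224, 3], maxval=255, dtype=tf.float32)
preprocessed = extractor.preprocess(images)  # VGG-style channel mean subtraction
rpn_features = extractor.get_proposal_feature_extractor_model(
    name='TestScope')(preprocessed)
# With a stride of 16, a 224x224 input yields a 14x14 conv4 feature map of
# depth 1024, which is exactly what the test below asserts.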
diff --git a/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor_tf2_test.py b/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor_tf2_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..15e8a5fbf153cdee690be94d2d9c910070af35f0
--- /dev/null
+++ b/research/object_detection/models/faster_rcnn_resnet_keras_feature_extractor_tf2_test.py
@@ -0,0 +1,80 @@
+# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+"""Tests for models.faster_rcnn_resnet_keras_feature_extractor."""
+import unittest
+import tensorflow.compat.v1 as tf
+
+from object_detection.models import faster_rcnn_resnet_keras_feature_extractor as frcnn_res
+from object_detection.utils import tf_version
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class FasterRcnnResnetKerasFeatureExtractorTest(tf.test.TestCase):
+
+ def _build_feature_extractor(self, architecture='resnet_v1_50'):
+ return frcnn_res.FasterRCNNResnet50KerasFeatureExtractor(
+ is_training=False,
+ first_stage_features_stride=16,
+ batch_norm_trainable=False,
+ weight_decay=0.0)
+
+ def test_extract_proposal_features_returns_expected_size(self):
+ feature_extractor = self._build_feature_extractor()
+ preprocessed_inputs = tf.random_uniform(
+ [1, 224, 224, 3], maxval=255, dtype=tf.float32)
+ rpn_feature_map = feature_extractor.get_proposal_feature_extractor_model(
+ name='TestScope')(preprocessed_inputs)
+ features_shape = tf.shape(rpn_feature_map)
+ self.assertAllEqual(features_shape.numpy(), [1, 14, 14, 1024])
+
+ def test_extract_proposal_features_half_size_input(self):
+ feature_extractor = self._build_feature_extractor()
+ preprocessed_inputs = tf.random_uniform(
+ [1, 112, 112, 3], maxval=255, dtype=tf.float32)
+ rpn_feature_map = feature_extractor.get_proposal_feature_extractor_model(
+ name='TestScope')(preprocessed_inputs)
+ features_shape = tf.shape(rpn_feature_map)
+ self.assertAllEqual(features_shape.numpy(), [1, 7, 7, 1024])
+
+ def test_extract_proposal_features_dies_with_incorrect_rank_inputs(self):
+ feature_extractor = self._build_feature_extractor()
+ preprocessed_inputs = tf.random_uniform(
+ [224, 224, 3], maxval=255, dtype=tf.float32)
+ with self.assertRaises(tf.errors.InvalidArgumentError):
+ feature_extractor.get_proposal_feature_extractor_model(
+ name='TestScope')(preprocessed_inputs)
+
+ def test_extract_box_classifier_features_returns_expected_size(self):
+ feature_extractor = self._build_feature_extractor()
+ proposal_feature_maps = tf.random_uniform(
+ [3, 7, 7, 1024], maxval=255, dtype=tf.float32)
+ model = feature_extractor.get_box_classifier_feature_extractor_model(
+ name='TestScope')
+ proposal_classifier_features = (
+ model(proposal_feature_maps))
+ features_shape = tf.shape(proposal_classifier_features)
+    # Note: due to a slight mismatch between the slim and keras resnet
+    # definitions, the output shape of the box classifier differs from that of
+    # the slim implementation. The keras version is more `canonical` in that
+    # it more accurately reflects the original authors' implementation.
+ # TODO(jonathanhuang): make the output shape match that of the slim
+ # implementation by using atrous convolutions.
+ self.assertAllEqual(features_shape.numpy(), [3, 4, 4, 2048])
+
+
+if __name__ == '__main__':
+ tf.enable_v2_behavior()
+ tf.test.main()
diff --git a/research/object_detection/models/faster_rcnn_resnet_v1_feature_extractor_test.py b/research/object_detection/models/faster_rcnn_resnet_v1_feature_extractor_tf1_test.py
similarity index 98%
rename from research/object_detection/models/faster_rcnn_resnet_v1_feature_extractor_test.py
rename to research/object_detection/models/faster_rcnn_resnet_v1_feature_extractor_tf1_test.py
index 0b5055a0c6829bdcdadf6c2fb6295ce67e3e137a..3d47da04af5fb3f728379a649d64329c862eaf75 100644
--- a/research/object_detection/models/faster_rcnn_resnet_v1_feature_extractor_test.py
+++ b/research/object_detection/models/faster_rcnn_resnet_v1_feature_extractor_tf1_test.py
@@ -14,13 +14,15 @@
# ==============================================================================
"""Tests for object_detection.models.faster_rcnn_resnet_v1_feature_extractor."""
-
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models import faster_rcnn_resnet_v1_feature_extractor as faster_rcnn_resnet_v1
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class FasterRcnnResnetV1FeatureExtractorTest(tf.test.TestCase):
def _build_feature_extractor(self,
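The feature_map_generators_test.py changes that follow drop the use_keras parameterization in favor of the shared test_case.TestCase pattern: ops are built under test_utils.GraphContextOrNone() and executed through self.execute(graph_fn, ...), so the same test body runs in TF1 graph mode and in TF2 eager mode. A generic sketch of that pattern, assuming the utilities behave as used below:

import tensorflow.compat.v1 as tf
from object_detection.utils import test_case
from object_detection.utils import test_utils


class DualModeShapeTest(test_case.TestCase):

  def test_pooled_shape(self):
    # In TF1 this opens a graph context; in TF2 it is a no-op returning None.
    with test_utils.GraphContextOrNone() as g:
      inputs = tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)

    def graph_fn():
      return tf.reduce_mean(inputs, axis=[1, 2])

    # self.execute runs graph_fn in a session under TF1 or eagerly under TF2.
    pooled = self.execute(graph_fn, [], g)
    self.assertEqual(pooled.shape, (4, 1024))


if __name__ == '__main__':
  tf.test.main()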
diff --git a/research/object_detection/models/feature_map_generators_test.py b/research/object_detection/models/feature_map_generators_test.py
index 49ba09bdf0681374643c349bed5d5d319cc90a9a..951e7760bd8a42afb19f61b6c6bc1c1f744d74dd 100644
--- a/research/object_detection/models/feature_map_generators_test.py
+++ b/research/object_detection/models/feature_map_generators_test.py
@@ -14,7 +14,7 @@
# ==============================================================================
"""Tests for feature map generators."""
-
+import unittest
from absl.testing import parameterized
import numpy as np
@@ -25,6 +25,9 @@ from google.protobuf import text_format
from object_detection.builders import hyperparams_builder
from object_detection.models import feature_map_generators
from object_detection.protos import hyperparams_pb2
+from object_detection.utils import test_case
+from object_detection.utils import test_utils
+from object_detection.utils import tf_version
INCEPTION_V2_LAYOUT = {
'from_layer': ['Mixed_3c', 'Mixed_4c', 'Mixed_5c', '', '', ''],
@@ -52,11 +55,7 @@ SSD_MOBILENET_V1_WEIGHT_SHARED_LAYOUT = {
}
-@parameterized.parameters(
- {'use_keras': False},
- {'use_keras': True},
-)
-class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase):
+class MultiResolutionFeatureMapGeneratorTest(test_case.TestCase):
def _build_conv_hyperparams(self):
conv_hyperparams = hyperparams_pb2.Hyperparams()
@@ -73,9 +72,9 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase):
text_format.Merge(conv_hyperparams_text_proto, conv_hyperparams)
return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams)
- def _build_feature_map_generator(self, feature_map_layout, use_keras,
+ def _build_feature_map_generator(self, feature_map_layout,
pool_residual=False):
- if use_keras:
+ if tf_version.is_tf2():
return feature_map_generators.KerasMultiResolutionFeatureMaps(
feature_map_layout=feature_map_layout,
depth_multiplier=1,
@@ -97,17 +96,18 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase):
pool_residual=pool_residual)
return feature_map_generator
- def test_get_expected_feature_map_shapes_with_inception_v2(self, use_keras):
- image_features = {
- 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32),
- 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32),
- 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)
- }
- feature_map_generator = self._build_feature_map_generator(
- feature_map_layout=INCEPTION_V2_LAYOUT,
- use_keras=use_keras
- )
- feature_maps = feature_map_generator(image_features)
+ def test_get_expected_feature_map_shapes_with_inception_v2(self):
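+ # GraphContextOrNone yields a fresh tf.Graph under TF1 and None under TF2,
+ # so the same test body runs in both graph and eager modes.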
+ with test_utils.GraphContextOrNone() as g:
+ image_features = {
+ 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32),
+ 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32),
+ 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)
+ }
+ feature_map_generator = self._build_feature_map_generator(
+ feature_map_layout=INCEPTION_V2_LAYOUT)
+ def graph_fn():
+ feature_maps = feature_map_generator(image_features)
+ return feature_maps
expected_feature_map_shapes = {
'Mixed_3c': (4, 28, 28, 256),
@@ -116,29 +116,25 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase):
'Mixed_5c_2_Conv2d_3_3x3_s2_512': (4, 4, 4, 512),
'Mixed_5c_2_Conv2d_4_3x3_s2_256': (4, 2, 2, 256),
'Mixed_5c_2_Conv2d_5_3x3_s2_256': (4, 1, 1, 256)}
-
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- out_feature_maps = sess.run(feature_maps)
- out_feature_map_shapes = dict(
- (key, value.shape) for key, value in out_feature_maps.items())
- self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
+ out_feature_maps = self.execute(graph_fn, [], g)
+ out_feature_map_shapes = dict(
+ (key, value.shape) for key, value in out_feature_maps.items())
+ self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
def test_get_expected_feature_map_shapes_with_inception_v2_use_depthwise(
- self, use_keras):
- image_features = {
- 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32),
- 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32),
- 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)
- }
- layout_copy = INCEPTION_V2_LAYOUT.copy()
- layout_copy['use_depthwise'] = True
- feature_map_generator = self._build_feature_map_generator(
- feature_map_layout=layout_copy,
- use_keras=use_keras
- )
- feature_maps = feature_map_generator(image_features)
+ self):
+ with test_utils.GraphContextOrNone() as g:
+ image_features = {
+ 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32),
+ 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32),
+ 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)
+ }
+ layout_copy = INCEPTION_V2_LAYOUT.copy()
+ layout_copy['use_depthwise'] = True
+ feature_map_generator = self._build_feature_map_generator(
+ feature_map_layout=layout_copy)
+ def graph_fn():
+ return feature_map_generator(image_features)
expected_feature_map_shapes = {
'Mixed_3c': (4, 28, 28, 256),
@@ -147,29 +143,25 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase):
'Mixed_5c_2_Conv2d_3_3x3_s2_512': (4, 4, 4, 512),
'Mixed_5c_2_Conv2d_4_3x3_s2_256': (4, 2, 2, 256),
'Mixed_5c_2_Conv2d_5_3x3_s2_256': (4, 1, 1, 256)}
-
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- out_feature_maps = sess.run(feature_maps)
- out_feature_map_shapes = dict(
- (key, value.shape) for key, value in out_feature_maps.items())
- self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
-
- def test_get_expected_feature_map_shapes_use_explicit_padding(
- self, use_keras):
- image_features = {
- 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32),
- 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32),
- 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)
- }
- layout_copy = INCEPTION_V2_LAYOUT.copy()
- layout_copy['use_explicit_padding'] = True
- feature_map_generator = self._build_feature_map_generator(
- feature_map_layout=layout_copy,
- use_keras=use_keras
- )
- feature_maps = feature_map_generator(image_features)
+ out_feature_maps = self.execute(graph_fn, [], g)
+ out_feature_map_shapes = dict(
+ (key, value.shape) for key, value in out_feature_maps.items())
+ self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
+
+ def test_get_expected_feature_map_shapes_use_explicit_padding(self):
+ with test_utils.GraphContextOrNone() as g:
+ image_features = {
+ 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32),
+ 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32),
+ 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)
+ }
+ layout_copy = INCEPTION_V2_LAYOUT.copy()
+ layout_copy['use_explicit_padding'] = True
+ feature_map_generator = self._build_feature_map_generator(
+ feature_map_layout=layout_copy,
+ )
+ def graph_fn():
+ return feature_map_generator(image_features)
expected_feature_map_shapes = {
'Mixed_3c': (4, 28, 28, 256),
@@ -178,27 +170,24 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase):
'Mixed_5c_2_Conv2d_3_3x3_s2_512': (4, 4, 4, 512),
'Mixed_5c_2_Conv2d_4_3x3_s2_256': (4, 2, 2, 256),
'Mixed_5c_2_Conv2d_5_3x3_s2_256': (4, 1, 1, 256)}
+ out_feature_maps = self.execute(graph_fn, [], g)
+ out_feature_map_shapes = dict(
+ (key, value.shape) for key, value in out_feature_maps.items())
+ self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
+
+ def test_get_expected_feature_map_shapes_with_inception_v3(self):
+ with test_utils.GraphContextOrNone() as g:
+ image_features = {
+ 'Mixed_5d': tf.random_uniform([4, 35, 35, 256], dtype=tf.float32),
+ 'Mixed_6e': tf.random_uniform([4, 17, 17, 576], dtype=tf.float32),
+ 'Mixed_7c': tf.random_uniform([4, 8, 8, 1024], dtype=tf.float32)
+ }
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- out_feature_maps = sess.run(feature_maps)
- out_feature_map_shapes = dict(
- (key, value.shape) for key, value in out_feature_maps.items())
- self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
-
- def test_get_expected_feature_map_shapes_with_inception_v3(self, use_keras):
- image_features = {
- 'Mixed_5d': tf.random_uniform([4, 35, 35, 256], dtype=tf.float32),
- 'Mixed_6e': tf.random_uniform([4, 17, 17, 576], dtype=tf.float32),
- 'Mixed_7c': tf.random_uniform([4, 8, 8, 1024], dtype=tf.float32)
- }
-
- feature_map_generator = self._build_feature_map_generator(
- feature_map_layout=INCEPTION_V3_LAYOUT,
- use_keras=use_keras
- )
- feature_maps = feature_map_generator(image_features)
+ feature_map_generator = self._build_feature_map_generator(
+ feature_map_layout=INCEPTION_V3_LAYOUT,
+ )
+ def graph_fn():
+ return feature_map_generator(image_features)
expected_feature_map_shapes = {
'Mixed_5d': (4, 35, 35, 256),
@@ -207,29 +196,26 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase):
'Mixed_7c_2_Conv2d_3_3x3_s2_512': (4, 4, 4, 512),
'Mixed_7c_2_Conv2d_4_3x3_s2_256': (4, 2, 2, 256),
'Mixed_7c_2_Conv2d_5_3x3_s2_128': (4, 1, 1, 128)}
-
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- out_feature_maps = sess.run(feature_maps)
- out_feature_map_shapes = dict(
- (key, value.shape) for key, value in out_feature_maps.items())
- self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
+ out_feature_maps = self.execute(graph_fn, [], g)
+ out_feature_map_shapes = dict(
+ (key, value.shape) for key, value in out_feature_maps.items())
+ self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
def test_get_expected_feature_map_shapes_with_embedded_ssd_mobilenet_v1(
- self, use_keras):
- image_features = {
- 'Conv2d_11_pointwise': tf.random_uniform([4, 16, 16, 512],
- dtype=tf.float32),
- 'Conv2d_13_pointwise': tf.random_uniform([4, 8, 8, 1024],
- dtype=tf.float32),
- }
+ self):
+ with test_utils.GraphContextOrNone() as g:
+ image_features = {
+ 'Conv2d_11_pointwise': tf.random_uniform([4, 16, 16, 512],
+ dtype=tf.float32),
+ 'Conv2d_13_pointwise': tf.random_uniform([4, 8, 8, 1024],
+ dtype=tf.float32),
+ }
- feature_map_generator = self._build_feature_map_generator(
- feature_map_layout=EMBEDDED_SSD_MOBILENET_V1_LAYOUT,
- use_keras=use_keras
- )
- feature_maps = feature_map_generator(image_features)
+ feature_map_generator = self._build_feature_map_generator(
+ feature_map_layout=EMBEDDED_SSD_MOBILENET_V1_LAYOUT,
+ )
+ def graph_fn():
+ return feature_map_generator(image_features)
expected_feature_map_shapes = {
'Conv2d_11_pointwise': (4, 16, 16, 512),
@@ -237,55 +223,50 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase):
'Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_512': (4, 4, 4, 512),
'Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256': (4, 2, 2, 256),
'Conv2d_13_pointwise_2_Conv2d_4_2x2_s2_256': (4, 1, 1, 256)}
-
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- out_feature_maps = sess.run(feature_maps)
- out_feature_map_shapes = dict(
- (key, value.shape) for key, value in out_feature_maps.items())
- self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
+ out_feature_maps = self.execute(graph_fn, [], g)
+ out_feature_map_shapes = dict(
+ (key, value.shape) for key, value in out_feature_maps.items())
+ self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
def test_feature_map_shapes_with_pool_residual_ssd_mobilenet_v1(
- self, use_keras):
- image_features = {
- 'Conv2d_13_pointwise': tf.random_uniform([4, 8, 8, 1024],
- dtype=tf.float32),
- }
+ self):
+ with test_utils.GraphContextOrNone() as g:
+ image_features = {
+ 'Conv2d_13_pointwise': tf.random_uniform([4, 8, 8, 1024],
+ dtype=tf.float32),
+ }
- feature_map_generator = self._build_feature_map_generator(
- feature_map_layout=SSD_MOBILENET_V1_WEIGHT_SHARED_LAYOUT,
- use_keras=use_keras,
- pool_residual=True
- )
- feature_maps = feature_map_generator(image_features)
+ feature_map_generator = self._build_feature_map_generator(
+ feature_map_layout=SSD_MOBILENET_V1_WEIGHT_SHARED_LAYOUT,
+ pool_residual=True
+ )
+ def graph_fn():
+ return feature_map_generator(image_features)
expected_feature_map_shapes = {
'Conv2d_13_pointwise': (4, 8, 8, 1024),
'Conv2d_13_pointwise_2_Conv2d_1_3x3_s2_256': (4, 4, 4, 256),
'Conv2d_13_pointwise_2_Conv2d_2_3x3_s2_256': (4, 2, 2, 256),
'Conv2d_13_pointwise_2_Conv2d_3_3x3_s2_256': (4, 1, 1, 256)}
+ out_feature_maps = self.execute(graph_fn, [], g)
+ out_feature_map_shapes = dict(
+ (key, value.shape) for key, value in out_feature_maps.items())
+ self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
+
+ def test_get_expected_variable_names_with_inception_v2(self):
+ with test_utils.GraphContextOrNone() as g:
+ image_features = {
+ 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32),
+ 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32),
+ 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)
+ }
+ feature_map_generator = self._build_feature_map_generator(
+ feature_map_layout=INCEPTION_V2_LAYOUT,
+ )
+ def graph_fn():
+ return feature_map_generator(image_features)
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- out_feature_maps = sess.run(feature_maps)
- out_feature_map_shapes = dict(
- (key, value.shape) for key, value in out_feature_maps.items())
- self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
-
- def test_get_expected_variable_names_with_inception_v2(self, use_keras):
- image_features = {
- 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32),
- 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32),
- 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)
- }
- feature_map_generator = self._build_feature_map_generator(
- feature_map_layout=INCEPTION_V2_LAYOUT,
- use_keras=use_keras
- )
- feature_maps = feature_map_generator(image_features)
-
+ self.execute(graph_fn, [], g)
expected_slim_variables = set([
'Mixed_5c_1_Conv2d_3_1x1_256/weights',
'Mixed_5c_1_Conv2d_3_1x1_256/biases',
@@ -316,32 +297,32 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase):
'FeatureMaps/Mixed_5c_2_Conv2d_5_3x3_s2_256_conv/bias',
])
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- sess.run(feature_maps)
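+ # In TF2 the Keras generator owns its variables, so read them directly from
+ # the layer; in TF1, collect the trainable variables from the graph instead.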
+ if tf_version.is_tf2():
actual_variable_set = set(
- [var.op.name for var in tf.trainable_variables()])
- if use_keras:
- self.assertSetEqual(expected_keras_variables, actual_variable_set)
- else:
- self.assertSetEqual(expected_slim_variables, actual_variable_set)
+ [var.name.split(':')[0] for var in feature_map_generator.variables])
+ self.assertSetEqual(expected_keras_variables, actual_variable_set)
+ else:
+ with g.as_default():
+ actual_variable_set = set(
+ [var.op.name for var in tf.trainable_variables()])
+ self.assertSetEqual(expected_slim_variables, actual_variable_set)
def test_get_expected_variable_names_with_inception_v2_use_depthwise(
- self,
- use_keras):
- image_features = {
- 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32),
- 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32),
- 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)
- }
- layout_copy = INCEPTION_V2_LAYOUT.copy()
- layout_copy['use_depthwise'] = True
- feature_map_generator = self._build_feature_map_generator(
- feature_map_layout=layout_copy,
- use_keras=use_keras
- )
- feature_maps = feature_map_generator(image_features)
+ self):
+ with test_utils.GraphContextOrNone() as g:
+ image_features = {
+ 'Mixed_3c': tf.random_uniform([4, 28, 28, 256], dtype=tf.float32),
+ 'Mixed_4c': tf.random_uniform([4, 14, 14, 576], dtype=tf.float32),
+ 'Mixed_5c': tf.random_uniform([4, 7, 7, 1024], dtype=tf.float32)
+ }
+ layout_copy = INCEPTION_V2_LAYOUT.copy()
+ layout_copy['use_depthwise'] = True
+ feature_map_generator = self._build_feature_map_generator(
+ feature_map_layout=layout_copy,
+ )
+ def graph_fn():
+ return feature_map_generator(image_features)
+ self.execute(graph_fn, [], g)
expected_slim_variables = set([
'Mixed_5c_1_Conv2d_3_1x1_256/weights',
@@ -391,23 +372,20 @@ class MultiResolutionFeatureMapGeneratorTest(tf.test.TestCase):
'FeatureMaps/Mixed_5c_2_Conv2d_5_3x3_s2_256_conv/bias',
])
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- sess.run(feature_maps)
+ if tf_version.is_tf2():
actual_variable_set = set(
- [var.op.name for var in tf.trainable_variables()])
- if use_keras:
- self.assertSetEqual(expected_keras_variables, actual_variable_set)
- else:
- self.assertSetEqual(expected_slim_variables, actual_variable_set)
+ [var.name.split(':')[0] for var in feature_map_generator.variables])
+ self.assertSetEqual(expected_keras_variables, actual_variable_set)
+ else:
+ with g.as_default():
+ actual_variable_set = set(
+ [var.op.name for var in tf.trainable_variables()])
+ self.assertSetEqual(expected_slim_variables, actual_variable_set)
-@parameterized.parameters({'use_native_resize_op': True, 'use_keras': False},
- {'use_native_resize_op': False, 'use_keras': False},
- {'use_native_resize_op': True, 'use_keras': True},
- {'use_native_resize_op': False, 'use_keras': True})
-class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase):
+@parameterized.parameters({'use_native_resize_op': True},
+ {'use_native_resize_op': False})
+class FPNFeatureMapGeneratorTest(test_case.TestCase, parameterized.TestCase):
def _build_conv_hyperparams(self):
conv_hyperparams = hyperparams_pb2.Hyperparams()
@@ -425,10 +403,10 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase):
return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams)
def _build_feature_map_generator(
- self, image_features, depth, use_keras, use_bounded_activations=False,
+ self, image_features, depth, use_bounded_activations=False,
use_native_resize_op=False, use_explicit_padding=False,
use_depthwise=False):
- if use_keras:
+ if tf_version.is_tf2():
return feature_map_generators.KerasFpnTopDownFeatureMaps(
num_levels=len(image_features),
depth=depth,
@@ -454,19 +432,20 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase):
return feature_map_generator
def test_get_expected_feature_map_shapes(
- self, use_native_resize_op, use_keras):
- image_features = [
- ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
- ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
- ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
- ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))
- ]
- feature_map_generator = self._build_feature_map_generator(
- image_features=image_features,
- depth=128,
- use_keras=use_keras,
- use_native_resize_op=use_native_resize_op)
- feature_maps = feature_map_generator(image_features)
+ self, use_native_resize_op):
+ with test_utils.GraphContextOrNone() as g:
+ image_features = [
+ ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
+ ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
+ ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
+ ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))
+ ]
+ feature_map_generator = self._build_feature_map_generator(
+ image_features=image_features,
+ depth=128,
+ use_native_resize_op=use_native_resize_op)
+ def graph_fn():
+ return feature_map_generator(image_features)
expected_feature_map_shapes = {
'top_down_block2': (4, 8, 8, 128),
@@ -474,30 +453,27 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase):
'top_down_block4': (4, 2, 2, 128),
'top_down_block5': (4, 1, 1, 128)
}
-
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- out_feature_maps = sess.run(feature_maps)
- out_feature_map_shapes = {key: value.shape
- for key, value in out_feature_maps.items()}
- self.assertDictEqual(out_feature_map_shapes, expected_feature_map_shapes)
+ out_feature_maps = self.execute(graph_fn, [], g)
+ out_feature_map_shapes = dict(
+ (key, value.shape) for key, value in out_feature_maps.items())
+ self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
def test_get_expected_feature_map_shapes_with_explicit_padding(
- self, use_native_resize_op, use_keras):
- image_features = [
- ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
- ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
- ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
- ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))
- ]
- feature_map_generator = self._build_feature_map_generator(
- image_features=image_features,
- depth=128,
- use_keras=use_keras,
- use_explicit_padding=True,
- use_native_resize_op=use_native_resize_op)
- feature_maps = feature_map_generator(image_features)
+ self, use_native_resize_op):
+ with test_utils.GraphContextOrNone() as g:
+ image_features = [
+ ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
+ ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
+ ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
+ ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))
+ ]
+ feature_map_generator = self._build_feature_map_generator(
+ image_features=image_features,
+ depth=128,
+ use_explicit_padding=True,
+ use_native_resize_op=use_native_resize_op)
+ def graph_fn():
+ return feature_map_generator(image_features)
expected_feature_map_shapes = {
'top_down_block2': (4, 8, 8, 128),
@@ -505,19 +481,15 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase):
'top_down_block4': (4, 2, 2, 128),
'top_down_block5': (4, 1, 1, 128)
}
+ out_feature_maps = self.execute(graph_fn, [], g)
+ out_feature_map_shapes = dict(
+ (key, value.shape) for key, value in out_feature_maps.items())
+ self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- out_feature_maps = sess.run(feature_maps)
- out_feature_map_shapes = {key: value.shape
- for key, value in out_feature_maps.items()}
- self.assertDictEqual(out_feature_map_shapes, expected_feature_map_shapes)
-
+ @unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
def test_use_bounded_activations_add_operations(
- self, use_native_resize_op, use_keras):
- tf_graph = tf.Graph()
- with tf_graph.as_default():
+ self, use_native_resize_op):
+ with test_utils.GraphContextOrNone() as g:
image_features = [('block2',
tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
('block3',
@@ -529,34 +501,23 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase):
feature_map_generator = self._build_feature_map_generator(
image_features=image_features,
depth=128,
- use_keras=use_keras,
use_bounded_activations=True,
use_native_resize_op=use_native_resize_op)
- feature_map_generator(image_features)
-
- if use_keras:
- expected_added_operations = dict.fromkeys([
- 'FeatureMaps/top_down/clip_by_value/clip_by_value',
- 'FeatureMaps/top_down/clip_by_value_1/clip_by_value',
- 'FeatureMaps/top_down/clip_by_value_2/clip_by_value',
- 'FeatureMaps/top_down/clip_by_value_3/clip_by_value',
- 'FeatureMaps/top_down/clip_by_value_4/clip_by_value',
- 'FeatureMaps/top_down/clip_by_value_5/clip_by_value',
- 'FeatureMaps/top_down/clip_by_value_6/clip_by_value',
- ])
- else:
- expected_added_operations = dict.fromkeys([
- 'top_down/clip_by_value', 'top_down/clip_by_value_1',
- 'top_down/clip_by_value_2', 'top_down/clip_by_value_3',
- 'top_down/clip_by_value_4', 'top_down/clip_by_value_5',
- 'top_down/clip_by_value_6'
- ])
-
- op_names = {op.name: None for op in tf_graph.get_operations()}
- self.assertDictContainsSubset(expected_added_operations, op_names)
+ def graph_fn():
+ return feature_map_generator(image_features)
+ self.execute(graph_fn, [], g)
+ expected_added_operations = dict.fromkeys([
+ 'top_down/clip_by_value', 'top_down/clip_by_value_1',
+ 'top_down/clip_by_value_2', 'top_down/clip_by_value_3',
+ 'top_down/clip_by_value_4', 'top_down/clip_by_value_5',
+ 'top_down/clip_by_value_6'
+ ])
+ op_names = {op.name: None for op in g.get_operations()}
+ self.assertDictContainsSubset(expected_added_operations, op_names)
+ @unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
def test_use_bounded_activations_clip_value(
- self, use_native_resize_op, use_keras):
+ self, use_native_resize_op):
tf_graph = tf.Graph()
with tf_graph.as_default():
image_features = [
@@ -568,28 +529,16 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase):
feature_map_generator = self._build_feature_map_generator(
image_features=image_features,
depth=128,
- use_keras=use_keras,
use_bounded_activations=True,
use_native_resize_op=use_native_resize_op)
feature_map_generator(image_features)
- if use_keras:
- expected_clip_by_value_ops = dict.fromkeys([
- 'FeatureMaps/top_down/clip_by_value/clip_by_value',
- 'FeatureMaps/top_down/clip_by_value_1/clip_by_value',
- 'FeatureMaps/top_down/clip_by_value_2/clip_by_value',
- 'FeatureMaps/top_down/clip_by_value_3/clip_by_value',
- 'FeatureMaps/top_down/clip_by_value_4/clip_by_value',
- 'FeatureMaps/top_down/clip_by_value_5/clip_by_value',
- 'FeatureMaps/top_down/clip_by_value_6/clip_by_value',
- ])
- else:
- expected_clip_by_value_ops = [
- 'top_down/clip_by_value', 'top_down/clip_by_value_1',
- 'top_down/clip_by_value_2', 'top_down/clip_by_value_3',
- 'top_down/clip_by_value_4', 'top_down/clip_by_value_5',
- 'top_down/clip_by_value_6'
- ]
+ expected_clip_by_value_ops = [
+ 'top_down/clip_by_value', 'top_down/clip_by_value_1',
+ 'top_down/clip_by_value_2', 'top_down/clip_by_value_3',
+ 'top_down/clip_by_value_4', 'top_down/clip_by_value_5',
+ 'top_down/clip_by_value_6'
+ ]
# Gathers activation tensors before and after clip_by_value operations.
activations = {}
@@ -631,20 +580,21 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase):
self.assertLessEqual(after_clipping_upper_bound, expected_upper_bound)
def test_get_expected_feature_map_shapes_with_depthwise(
- self, use_native_resize_op, use_keras):
- image_features = [
- ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
- ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
- ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
- ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))
- ]
- feature_map_generator = self._build_feature_map_generator(
- image_features=image_features,
- depth=128,
- use_keras=use_keras,
- use_depthwise=True,
- use_native_resize_op=use_native_resize_op)
- feature_maps = feature_map_generator(image_features)
+ self, use_native_resize_op):
+ with test_utils.GraphContextOrNone() as g:
+ image_features = [
+ ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
+ ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
+ ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
+ ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))
+ ]
+ feature_map_generator = self._build_feature_map_generator(
+ image_features=image_features,
+ depth=128,
+ use_depthwise=True,
+ use_native_resize_op=use_native_resize_op)
+ def graph_fn():
+ return feature_map_generator(image_features)
expected_feature_map_shapes = {
'top_down_block2': (4, 8, 8, 128),
@@ -652,30 +602,27 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase):
'top_down_block4': (4, 2, 2, 128),
'top_down_block5': (4, 1, 1, 128)
}
-
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- out_feature_maps = sess.run(feature_maps)
- out_feature_map_shapes = {key: value.shape
- for key, value in out_feature_maps.items()}
- self.assertDictEqual(out_feature_map_shapes, expected_feature_map_shapes)
+ out_feature_maps = self.execute(graph_fn, [], g)
+ out_feature_map_shapes = dict(
+ (key, value.shape) for key, value in out_feature_maps.items())
+ self.assertDictEqual(expected_feature_map_shapes, out_feature_map_shapes)
def test_get_expected_variable_names(
- self, use_native_resize_op, use_keras):
- image_features = [
- ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
- ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
- ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
- ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))
- ]
- feature_map_generator = self._build_feature_map_generator(
- image_features=image_features,
- depth=128,
- use_keras=use_keras,
- use_native_resize_op=use_native_resize_op)
- feature_maps = feature_map_generator(image_features)
-
+ self, use_native_resize_op):
+ with test_utils.GraphContextOrNone() as g:
+ image_features = [
+ ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
+ ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
+ ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
+ ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))
+ ]
+ feature_map_generator = self._build_feature_map_generator(
+ image_features=image_features,
+ depth=128,
+ use_native_resize_op=use_native_resize_op)
+ def graph_fn():
+ return feature_map_generator(image_features)
+ self.execute(graph_fn, [], g)
expected_slim_variables = set([
'projection_1/weights',
'projection_1/biases',
@@ -709,33 +656,34 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase):
'FeatureMaps/top_down/smoothing_3_conv/kernel',
'FeatureMaps/top_down/smoothing_3_conv/bias'
])
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- sess.run(feature_maps)
+
+ if tf_version.is_tf2():
actual_variable_set = set(
- [var.op.name for var in tf.trainable_variables()])
- if use_keras:
- self.assertSetEqual(expected_keras_variables, actual_variable_set)
- else:
- self.assertSetEqual(expected_slim_variables, actual_variable_set)
+ [var.name.split(':')[0] for var in feature_map_generator.variables])
+ self.assertSetEqual(expected_keras_variables, actual_variable_set)
+ else:
+ with g.as_default():
+ actual_variable_set = set(
+ [var.op.name for var in tf.trainable_variables()])
+ self.assertSetEqual(expected_slim_variables, actual_variable_set)
def test_get_expected_variable_names_with_depthwise(
- self, use_native_resize_op, use_keras):
- image_features = [
- ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
- ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
- ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
- ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))
- ]
- feature_map_generator = self._build_feature_map_generator(
- image_features=image_features,
- depth=128,
- use_keras=use_keras,
- use_depthwise=True,
- use_native_resize_op=use_native_resize_op)
- feature_maps = feature_map_generator(image_features)
-
+ self, use_native_resize_op):
+ with test_utils.GraphContextOrNone() as g:
+ image_features = [
+ ('block2', tf.random_uniform([4, 8, 8, 256], dtype=tf.float32)),
+ ('block3', tf.random_uniform([4, 4, 4, 256], dtype=tf.float32)),
+ ('block4', tf.random_uniform([4, 2, 2, 256], dtype=tf.float32)),
+ ('block5', tf.random_uniform([4, 1, 1, 256], dtype=tf.float32))
+ ]
+ feature_map_generator = self._build_feature_map_generator(
+ image_features=image_features,
+ depth=128,
+ use_depthwise=True,
+ use_native_resize_op=use_native_resize_op)
+ def graph_fn():
+ return feature_map_generator(image_features)
+ self.execute(graph_fn, [], g)
expected_slim_variables = set([
'projection_1/weights',
'projection_1/biases',
@@ -775,16 +723,16 @@ class FPNFeatureMapGeneratorTest(tf.test.TestCase, parameterized.TestCase):
'FeatureMaps/top_down/smoothing_3_depthwise_conv/pointwise_kernel',
'FeatureMaps/top_down/smoothing_3_depthwise_conv/bias'
])
- init_op = tf.global_variables_initializer()
- with self.test_session() as sess:
- sess.run(init_op)
- sess.run(feature_maps)
+
+ if tf_version.is_tf2():
actual_variable_set = set(
- [var.op.name for var in tf.trainable_variables()])
- if use_keras:
- self.assertSetEqual(expected_keras_variables, actual_variable_set)
- else:
- self.assertSetEqual(expected_slim_variables, actual_variable_set)
+ [var.name.split(':')[0] for var in feature_map_generator.variables])
+ self.assertSetEqual(expected_keras_variables, actual_variable_set)
+ else:
+ with g.as_default():
+ actual_variable_set = set(
+ [var.op.name for var in tf.trainable_variables()])
+ self.assertSetEqual(expected_slim_variables, actual_variable_set)
class GetDepthFunctionTest(tf.test.TestCase):
@@ -804,6 +752,7 @@ class GetDepthFunctionTest(tf.test.TestCase):
{'replace_pool_with_conv': False},
{'replace_pool_with_conv': True},
)
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class PoolingPyramidFeatureMapGeneratorTest(tf.test.TestCase):
def test_get_expected_feature_map_shapes(self, replace_pool_with_conv):
diff --git a/research/object_detection/models/keras_models/convert_keras_models.py b/research/object_detection/models/keras_models/convert_keras_models.py
new file mode 100644
index 0000000000000000000000000000000000000000..a34af981b37032115bf0c3e957e0f4c216504d4c
--- /dev/null
+++ b/research/object_detection/models/keras_models/convert_keras_models.py
@@ -0,0 +1,85 @@
+# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Write keras weights into a tensorflow checkpoint.
+
+The imagenet weights in `keras.applications` are downloaded from github.
+This script converts them into the tensorflow checkpoint format and stores them
+on disk where they can be easily accessible during training.
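+
+Example usage (the output directory below is illustrative):
+  python convert_keras_models.py --model=resnet_v1_50 \
+    --output_path=/tmp/keras_weights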
+"""
+
+from __future__ import print_function
+
+import os
+
+from absl import app
+import numpy as np
+import tensorflow.compat.v1 as tf
+
+FLAGS = tf.flags.FLAGS
+
+
+tf.flags.DEFINE_string('model', 'resnet_v2_101',
+ 'The model to load. The following are supported: '
+ '"resnet_v1_50", "resnet_v1_101", "resnet_v2_50", '
+ '"resnet_v2_101"')
+tf.flags.DEFINE_string('output_path', None,
+ 'The directory to output weights in.')
+tf.flags.DEFINE_boolean('verify_weights', True,
+ ('Verify the weights are loaded correctly by making '
+ 'sure the predictions are the same before and after '
+ 'saving.'))
+
+
+def init_model(name):
+ """Creates a Keras Model with the specific ResNet version."""
+ if name == 'resnet_v1_50':
+ model = tf.keras.applications.ResNet50(weights='imagenet')
+ elif name == 'resnet_v1_101':
+ model = tf.keras.applications.ResNet101(weights='imagenet')
+ elif name == 'resnet_v2_50':
+ model = tf.keras.applications.ResNet50V2(weights='imagenet')
+ elif name == 'resnet_v2_101':
+ model = tf.keras.applications.ResNet101V2(weights='imagenet')
+ else:
+ raise ValueError('Model {} not supported'.format(name))
+
+ return model
+
+
+def main(_):
+
+ model = init_model(FLAGS.model)
+
+ path = os.path.join(FLAGS.output_path, FLAGS.model)
+ tf.gfile.MakeDirs(path)
+ weights_path = os.path.join(path, 'weights')
+ ckpt = tf.train.Checkpoint(feature_extractor=model)
+ saved_path = ckpt.save(weights_path)
+
+ if FLAGS.verify_weights:
+ imgs = np.random.randn(1, 224, 224, 3).astype(np.float32)
+ keras_preds = model(imgs)
+
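+ # Rebuild the model from scratch, restore the saved checkpoint into it, and
+ # check that its predictions match the original Keras weights.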
+ model = init_model(FLAGS.model)
+ ckpt.restore(saved_path)
+ loaded_weights_pred = model(imgs).numpy()
+
+ if not np.all(np.isclose(keras_preds, loaded_weights_pred)):
+ raise RuntimeError('The model was not saved correctly.')
+
+
+if __name__ == '__main__':
+ tf.enable_v2_behavior()
+ app.run(main)
diff --git a/research/object_detection/models/keras_models/hourglass_network.py b/research/object_detection/models/keras_models/hourglass_network.py
index d216b1669e3864083ff477a395f48c596172e356..09fb8ed4f4fb6f4b2712d8403ba1b94985ad25ad 100644
--- a/research/object_detection/models/keras_models/hourglass_network.py
+++ b/research/object_detection/models/keras_models/hourglass_network.py
@@ -43,6 +43,15 @@ def _get_padding_for_kernel_size(kernel_size):
kernel_size))
+def batchnorm():
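+ """Returns SyncBatchNormalization when available, else BatchNormalization."""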
+ try:
+ return tf.keras.layers.experimental.SyncBatchNormalization(
+ name='batchnorm', epsilon=1e-5, momentum=0.1)
+ except AttributeError:
+ return tf.keras.layers.BatchNormalization(
+ name='batchnorm', epsilon=1e-5, momentum=0.1, fused=BATCH_NORM_FUSED)
+
+
class ConvolutionalBlock(tf.keras.layers.Layer):
"""Block that aggregates Convolution + Norm layer + ReLU."""
@@ -73,8 +82,7 @@ class ConvolutionalBlock(tf.keras.layers.Layer):
filters=out_channels, kernel_size=kernel_size, use_bias=False,
strides=stride, padding=padding)
- self.norm = tf.keras.layers.experimental.SyncBatchNormalization(
- name='batchnorm', epsilon=1e-5, momentum=0.1)
+ self.norm = batchnorm()
if relu:
self.relu = tf.keras.layers.ReLU()
@@ -124,8 +132,7 @@ class ResidualBlock(tf.keras.layers.Layer):
self.conv = tf.keras.layers.Conv2D(
filters=out_channels, kernel_size=kernel_size, use_bias=False,
strides=1, padding=padding)
- self.norm = tf.keras.layers.experimental.SyncBatchNormalization(
- name='batchnorm', epsilon=1e-5, momentum=0.1)
+ self.norm = batchnorm()
if skip_conv:
self.skip = SkipConvolution(out_channels=out_channels,
diff --git a/research/object_detection/models/keras_models/hourglass_network_test.py b/research/object_detection/models/keras_models/hourglass_network_tf2_test.py
similarity index 96%
rename from research/object_detection/models/keras_models/hourglass_network_test.py
rename to research/object_detection/models/keras_models/hourglass_network_tf2_test.py
index 2e05eb9924b19437b91f45c4454f72df7f0b1318..d90b950ecd4102a260643391de6a4475ed959c0f 100644
--- a/research/object_detection/models/keras_models/hourglass_network_test.py
+++ b/research/object_detection/models/keras_models/hourglass_network_tf2_test.py
@@ -13,14 +13,16 @@
# limitations under the License.
# ==============================================================================
"""Testing the Hourglass network."""
-
+import unittest
from absl.testing import parameterized
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models.keras_models import hourglass_network as hourglass
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class HourglassFeatureExtractorTest(tf.test.TestCase, parameterized.TestCase):
def test_identity_layer(self):
@@ -95,5 +97,4 @@ class HourglassFeatureExtractorTest(tf.test.TestCase, parameterized.TestCase):
if __name__ == '__main__':
- tf.enable_v2_behavior()
tf.test.main()
diff --git a/research/object_detection/models/keras_models/inception_resnet_v2_test.py b/research/object_detection/models/keras_models/inception_resnet_v2_tf2_test.py
similarity index 97%
rename from research/object_detection/models/keras_models/inception_resnet_v2_test.py
rename to research/object_detection/models/keras_models/inception_resnet_v2_tf2_test.py
index 5706e679c74cc7d30617940325597489c9a89245..4cbcc54ad66985920e7739888b3542b6a1e48bca 100644
--- a/research/object_detection/models/keras_models/inception_resnet_v2_test.py
+++ b/research/object_detection/models/keras_models/inception_resnet_v2_tf2_test.py
@@ -30,13 +30,14 @@ consistent.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
-
+import unittest
import numpy as np
from six.moves import zip
import tensorflow.compat.v1 as tf
from object_detection.models.keras_models import inception_resnet_v2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
_KERAS_TO_SLIM_ENDPOINT_NAMES = {
'activation': 'Conv2d_1a_3x3',
@@ -100,6 +101,7 @@ _NUM_CHANNELS = 3
_BATCH_SIZE = 2
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class InceptionResnetV2Test(test_case.TestCase):
def _create_application_with_layer_outputs(
@@ -166,8 +168,7 @@ class InceptionResnetV2Test(test_case.TestCase):
model = self._create_application_with_layer_outputs(
layer_names=layer_names,
batchnorm_training=False)
- preprocessed_inputs = tf.placeholder(
- tf.float32, (4, None, None, _NUM_CHANNELS))
+ preprocessed_inputs = tf.random.uniform([4, 40, 40, _NUM_CHANNELS])
model(preprocessed_inputs)
return model.variables
diff --git a/research/object_detection/models/keras_models/mobilenet_v1_test.py b/research/object_detection/models/keras_models/mobilenet_v1_tf2_test.py
similarity index 85%
rename from research/object_detection/models/keras_models/mobilenet_v1_test.py
rename to research/object_detection/models/keras_models/mobilenet_v1_tf2_test.py
index 72cc1f144dd0a914a4aaf388b90b339c13bd65c5..7e46999d9dfd2fc4ddcd2c432f5ecc2a07f3a9eb 100644
--- a/research/object_detection/models/keras_models/mobilenet_v1_test.py
+++ b/research/object_detection/models/keras_models/mobilenet_v1_tf2_test.py
@@ -29,7 +29,7 @@ consistent.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
-
+import unittest
import numpy as np
from six.moves import zip
import tensorflow.compat.v1 as tf
@@ -42,6 +42,7 @@ from object_detection.models.keras_models import model_utils
from object_detection.models.keras_models import test_utils
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
_KERAS_LAYERS_TO_CHECK = [
'conv1_relu',
@@ -64,6 +65,7 @@ _NUM_CHANNELS = 3
_BATCH_SIZE = 2
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class MobilenetV1Test(test_case.TestCase):
def _build_conv_hyperparams(self):
@@ -118,19 +120,17 @@ class MobilenetV1Test(test_case.TestCase):
self, image_height, image_width, depth_multiplier,
expected_feature_map_shape, use_explicit_padding=False, min_depth=8,
layer_names=None, conv_defs=None):
- def graph_fn(image_tensor):
- model = self._create_application_with_layer_outputs(
- layer_names=layer_names,
- batchnorm_training=False,
- use_explicit_padding=use_explicit_padding,
- min_depth=min_depth,
- alpha=depth_multiplier,
- conv_defs=conv_defs)
- return model(image_tensor)
+ model = self._create_application_with_layer_outputs(
+ layer_names=layer_names,
+ batchnorm_training=False,
+ use_explicit_padding=use_explicit_padding,
+ min_depth=min_depth,
+ alpha=depth_multiplier,
+ conv_defs=conv_defs)
image_tensor = np.random.rand(_BATCH_SIZE, image_height, image_width,
_NUM_CHANNELS).astype(np.float32)
- feature_maps = self.execute(graph_fn, [image_tensor])
+ feature_maps = model(image_tensor)
for feature_map, expected_shape in zip(feature_maps,
expected_feature_map_shape):
@@ -140,36 +140,29 @@ class MobilenetV1Test(test_case.TestCase):
self, image_height, image_width, depth_multiplier,
expected_feature_map_shape, use_explicit_padding=False, min_depth=8,
layer_names=None):
- def graph_fn(image_height, image_width):
- image_tensor = tf.random_uniform([_BATCH_SIZE, image_height, image_width,
- _NUM_CHANNELS], dtype=tf.float32)
- model = self._create_application_with_layer_outputs(
- layer_names=layer_names,
- batchnorm_training=False,
- use_explicit_padding=use_explicit_padding,
- alpha=depth_multiplier)
- return model(image_tensor)
+ image_tensor = tf.random_uniform([_BATCH_SIZE, image_height, image_width,
+ _NUM_CHANNELS], dtype=tf.float32)
+ model = self._create_application_with_layer_outputs(
+ layer_names=layer_names,
+ batchnorm_training=False,
+ use_explicit_padding=use_explicit_padding,
+ alpha=depth_multiplier)
- feature_maps = self.execute_cpu(graph_fn, [
- np.array(image_height, dtype=np.int32),
- np.array(image_width, dtype=np.int32)
- ])
+ feature_maps = model(image_tensor)
for feature_map, expected_shape in zip(feature_maps,
expected_feature_map_shape):
self.assertAllEqual(feature_map.shape, expected_shape)
def _get_variables(self, depth_multiplier, layer_names=None):
- g = tf.Graph()
- with g.as_default():
- preprocessed_inputs = tf.placeholder(
- tf.float32, (4, None, None, _NUM_CHANNELS))
- model = self._create_application_with_layer_outputs(
- layer_names=layer_names,
- batchnorm_training=False, use_explicit_padding=False,
- alpha=depth_multiplier)
- model(preprocessed_inputs)
- return g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
+ tf.keras.backend.clear_session()
+ model = self._create_application_with_layer_outputs(
+ layer_names=layer_names,
+ batchnorm_training=False, use_explicit_padding=False,
+ alpha=depth_multiplier)
+ preprocessed_inputs = tf.random.uniform([2, 40, 40, 3])
+ model(preprocessed_inputs)
+ return model.variables
def test_returns_correct_shapes_128(self):
image_height = 128
diff --git a/research/object_detection/models/keras_models/mobilenet_v2_test.py b/research/object_detection/models/keras_models/mobilenet_v2_tf2_test.py
similarity index 84%
rename from research/object_detection/models/keras_models/mobilenet_v2_test.py
rename to research/object_detection/models/keras_models/mobilenet_v2_tf2_test.py
index cfdd119781dc3d73efdb457dc55949506cdaf1bb..2a53a9b63f28522197bc3daab29dab3a56dfb994 100644
--- a/research/object_detection/models/keras_models/mobilenet_v2_test.py
+++ b/research/object_detection/models/keras_models/mobilenet_v2_tf2_test.py
@@ -18,7 +18,7 @@
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
-
+import unittest
import numpy as np
from six.moves import zip
import tensorflow.compat.v1 as tf
@@ -31,6 +31,7 @@ from object_detection.models.keras_models import model_utils
from object_detection.models.keras_models import test_utils
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
_layers_to_check = [
'Conv1_relu',
@@ -53,6 +54,7 @@ _layers_to_check = [
'out_relu']
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class MobilenetV2Test(test_case.TestCase):
def _build_conv_hyperparams(self):
@@ -86,6 +88,8 @@ class MobilenetV2Test(test_case.TestCase):
min_depth=None,
conv_defs=None):
"""Constructs Keras mobilenetv2 that extracts intermediate layer outputs."""
+ # Clear the Keras backend to keep layer naming isolated between test runs.
+ tf.keras.backend.clear_session()
if not layer_names:
layer_names = _layers_to_check
full_model = mobilenet_v2.mobilenet_v2(
@@ -107,19 +111,17 @@ class MobilenetV2Test(test_case.TestCase):
self, batch_size, image_height, image_width, depth_multiplier,
expected_feature_map_shapes, use_explicit_padding=False, min_depth=None,
layer_names=None, conv_defs=None):
- def graph_fn(image_tensor):
- model = self._create_application_with_layer_outputs(
- layer_names=layer_names,
- batchnorm_training=False,
- use_explicit_padding=use_explicit_padding,
- min_depth=min_depth,
- alpha=depth_multiplier,
- conv_defs=conv_defs)
- return model(image_tensor)
+ model = self._create_application_with_layer_outputs(
+ layer_names=layer_names,
+ batchnorm_training=False,
+ use_explicit_padding=use_explicit_padding,
+ min_depth=min_depth,
+ alpha=depth_multiplier,
+ conv_defs=conv_defs)
image_tensor = np.random.rand(batch_size, image_height, image_width,
3).astype(np.float32)
- feature_maps = self.execute(graph_fn, [image_tensor])
+ feature_maps = model([image_tensor])
for feature_map, expected_shape in zip(feature_maps,
expected_feature_map_shapes):
@@ -129,34 +131,30 @@ class MobilenetV2Test(test_case.TestCase):
self, batch_size, image_height, image_width, depth_multiplier,
expected_feature_map_shapes, use_explicit_padding=False,
layer_names=None):
- def graph_fn(image_height, image_width):
- image_tensor = tf.random_uniform([batch_size, image_height, image_width,
- 3], dtype=tf.float32)
- model = self._create_application_with_layer_outputs(
- layer_names=layer_names,
- batchnorm_training=False, use_explicit_padding=use_explicit_padding,
- alpha=depth_multiplier)
- return model(image_tensor)
-
- feature_maps = self.execute_cpu(graph_fn, [
- np.array(image_height, dtype=np.int32),
- np.array(image_width, dtype=np.int32)
- ])
-
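+ # Sample height/width from a one-element uniform range so they are runtime
+ # tensors and the input shape is not statically known.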
+ height = tf.random.uniform([], minval=image_height, maxval=image_height+1,
+ dtype=tf.int32)
+ width = tf.random.uniform([], minval=image_width, maxval=image_width+1,
+ dtype=tf.int32)
+ image_tensor = tf.random.uniform([batch_size, height, width,
+ 3], dtype=tf.float32)
+ model = self._create_application_with_layer_outputs(
+ layer_names=layer_names,
+ batchnorm_training=False, use_explicit_padding=use_explicit_padding,
+ alpha=depth_multiplier)
+ feature_maps = model(image_tensor)
for feature_map, expected_shape in zip(feature_maps,
expected_feature_map_shapes):
self.assertAllEqual(feature_map.shape, expected_shape)
def _get_variables(self, depth_multiplier, layer_names=None):
- g = tf.Graph()
- with g.as_default():
- preprocessed_inputs = tf.placeholder(tf.float32, (4, None, None, 3))
- model = self._create_application_with_layer_outputs(
- layer_names=layer_names,
- batchnorm_training=False, use_explicit_padding=False,
- alpha=depth_multiplier)
- model(preprocessed_inputs)
- return g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
+ tf.keras.backend.clear_session()
+ model = self._create_application_with_layer_outputs(
+ layer_names=layer_names,
+ batchnorm_training=False, use_explicit_padding=False,
+ alpha=depth_multiplier)
+ preprocessed_inputs = tf.random.uniform([2, 40, 40, 3])
+ model(preprocessed_inputs)
+ return model.variables
def test_returns_correct_shapes_128(self):
image_height = 128
diff --git a/research/object_detection/models/keras_models/resnet_v1_test.py b/research/object_detection/models/keras_models/resnet_v1_tf2_test.py
similarity index 97%
rename from research/object_detection/models/keras_models/resnet_v1_test.py
rename to research/object_detection/models/keras_models/resnet_v1_tf2_test.py
index 7b0c2a8e049e01030f95f7e93aa27a1538d47830..71cc5f22bd994b6432957bf5b34837f829c9b8da 100644
--- a/research/object_detection/models/keras_models/resnet_v1_test.py
+++ b/research/object_detection/models/keras_models/resnet_v1_tf2_test.py
@@ -19,7 +19,7 @@ object detection. To verify the consistency of the two models, we compare:
1. Output shape of each layer given different inputs.
2. Number of global variables.
"""
-
+import unittest
import numpy as np
from six.moves import zip
import tensorflow.compat.v1 as tf
@@ -30,6 +30,7 @@ from object_detection.builders import hyperparams_builder
from object_detection.models.keras_models import resnet_v1
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
_EXPECTED_SHAPES_224_RESNET50 = {
'conv2_block3_out': (4, 56, 56, 256),
@@ -65,6 +66,7 @@ _NUM_CHANNELS = 3
_BATCH_SIZE = 4
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class ResnetV1Test(test_case.TestCase):
def _build_conv_hyperparams(self):
@@ -146,8 +148,7 @@ class ResnetV1Test(test_case.TestCase):
tf.keras.backend.clear_session()
model = self._create_application_with_layer_outputs(
model_index, batchnorm_training=False)
- preprocessed_inputs = tf.placeholder(tf.float32,
- (4, None, None, _NUM_CHANNELS))
+ preprocessed_inputs = tf.random.uniform([2, 40, 40, _NUM_CHANNELS])
model(preprocessed_inputs)
return model.variables
diff --git a/research/object_detection/models/ssd_feature_extractor_test.py b/research/object_detection/models/ssd_feature_extractor_test.py
index 913a9f6a51330a2c3a1ca60a35f04b3f98c38e18..29c43e376c6167b61a256eb0812ee4d3bcee3ed5 100644
--- a/research/object_detection/models/ssd_feature_extractor_test.py
+++ b/research/object_detection/models/ssd_feature_extractor_test.py
@@ -31,6 +31,7 @@ from google.protobuf import text_format
from object_detection.builders import hyperparams_builder
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import test_utils
class SsdFeatureExtractorTestBase(test_case.TestCase):
@@ -89,14 +90,13 @@ class SsdFeatureExtractorTestBase(test_case.TestCase):
"""
pass
- def _extract_features(self,
- image_tensor,
- depth_multiplier,
- pad_to_multiple,
- use_explicit_padding=False,
- use_depthwise=False,
- num_layers=6,
- use_keras=False):
+ def _create_features(self,
+ depth_multiplier,
+ pad_to_multiple,
+ use_explicit_padding=False,
+ use_depthwise=False,
+ num_layers=6,
+ use_keras=False):
kwargs = {}
if use_explicit_padding:
kwargs.update({'use_explicit_padding': use_explicit_padding})
@@ -110,6 +110,12 @@ class SsdFeatureExtractorTestBase(test_case.TestCase):
depth_multiplier,
pad_to_multiple,
**kwargs)
+ return feature_extractor
+
+ def _extract_features(self,
+ image_tensor,
+ feature_extractor,
+ use_keras=False):
if use_keras:
feature_maps = feature_extractor(image_tensor)
else:
@@ -127,10 +133,8 @@ class SsdFeatureExtractorTestBase(test_case.TestCase):
num_layers=6,
use_keras=False,
use_depthwise=False):
-
- def graph_fn(image_tensor):
- return self._extract_features(
- image_tensor,
+ with test_utils.GraphContextOrNone() as g:
+ feature_extractor = self._create_features(
depth_multiplier,
pad_to_multiple,
use_explicit_padding=use_explicit_padding,
@@ -138,9 +142,15 @@ class SsdFeatureExtractorTestBase(test_case.TestCase):
use_keras=use_keras,
use_depthwise=use_depthwise)
+ def graph_fn(image_tensor):
+ return self._extract_features(
+ image_tensor,
+ feature_extractor,
+ use_keras=use_keras)
+
image_tensor = np.random.rand(batch_size, image_height, image_width,
3).astype(np.float32)
- feature_maps = self.execute(graph_fn, [image_tensor])
+ feature_maps = self.execute(graph_fn, [image_tensor], graph=g)
for feature_map, expected_shape in zip(
feature_maps, expected_feature_map_shapes):
self.assertAllEqual(feature_map.shape, expected_shape)
@@ -158,11 +168,8 @@ class SsdFeatureExtractorTestBase(test_case.TestCase):
use_keras=False,
use_depthwise=False):
- def graph_fn(image_height, image_width):
- image_tensor = tf.random_uniform([batch_size, image_height, image_width,
- 3], dtype=tf.float32)
- return self._extract_features(
- image_tensor,
+ with test_utils.GraphContextOrNone() as g:
+ feature_extractor = self._create_features(
depth_multiplier,
pad_to_multiple,
use_explicit_padding=use_explicit_padding,
@@ -170,10 +177,18 @@ class SsdFeatureExtractorTestBase(test_case.TestCase):
use_keras=use_keras,
use_depthwise=use_depthwise)
+ def graph_fn(image_height, image_width):
+ image_tensor = tf.random_uniform([batch_size, image_height, image_width,
+ 3], dtype=tf.float32)
+ return self._extract_features(
+ image_tensor,
+ feature_extractor,
+ use_keras=use_keras)
+
feature_maps = self.execute_cpu(graph_fn, [
np.array(image_height, dtype=np.int32),
np.array(image_width, dtype=np.int32)
- ])
+ ], graph=g)
for feature_map, expected_shape in zip(
feature_maps, expected_feature_map_shapes):
self.assertAllEqual(feature_map.shape, expected_shape)
@@ -186,19 +201,33 @@ class SsdFeatureExtractorTestBase(test_case.TestCase):
pad_to_multiple,
use_keras=False,
use_depthwise=False):
- preprocessed_inputs = tf.placeholder(tf.float32, (4, None, None, 3))
- feature_maps = self._extract_features(
- preprocessed_inputs,
- depth_multiplier,
- pad_to_multiple,
- use_keras=use_keras,
- use_depthwise=use_depthwise)
- test_preprocessed_image = np.random.rand(4, image_height, image_width, 3)
- with self.test_session() as sess:
- sess.run(tf.global_variables_initializer())
+
+ with test_utils.GraphContextOrNone() as g:
+ batch = 4
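+ # Draw height/width from a one-element uniform range so the input shape is
+ # only known at runtime.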
+ width = tf.random.uniform([], minval=image_width, maxval=image_width+1,
+ dtype=tf.int32)
+ height = tf.random.uniform([], minval=image_height, maxval=image_height+1,
+ dtype=tf.int32)
+ shape = tf.stack([batch, height, width, 3])
+ preprocessed_inputs = tf.random.uniform(shape)
+ feature_extractor = self._create_features(
+ depth_multiplier,
+ pad_to_multiple,
+ use_keras=use_keras,
+ use_depthwise=use_depthwise)
+
+ def graph_fn():
+ feature_maps = self._extract_features(
+ preprocessed_inputs,
+ feature_extractor,
+ use_keras=use_keras)
+ return feature_maps
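+ # The invalid input size should surface as a ValueError under TF2 eager
+ # execution and as an InvalidArgumentError when the TF1 graph is run.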
+ if self.is_tf2():
+ with self.assertRaises(ValueError):
+ self.execute_cpu(graph_fn, [], graph=g)
+ else:
with self.assertRaises(tf.errors.InvalidArgumentError):
- sess.run(feature_maps,
- feed_dict={preprocessed_inputs: test_preprocessed_image})
+ self.execute_cpu(graph_fn, [], graph=g)
def check_feature_extractor_variables_under_scope(self,
depth_multiplier,
@@ -221,11 +250,14 @@ class SsdFeatureExtractorTestBase(test_case.TestCase):
use_depthwise=False):
g = tf.Graph()
with g.as_default():
- preprocessed_inputs = tf.placeholder(tf.float32, (4, None, None, 3))
- self._extract_features(
- preprocessed_inputs,
+ feature_extractor = self._create_features(
depth_multiplier,
pad_to_multiple,
use_keras=use_keras,
use_depthwise=use_depthwise)
+ preprocessed_inputs = tf.placeholder(tf.float32, (4, None, None, 3))
+ self._extract_features(
+ preprocessed_inputs,
+ feature_extractor,
+ use_keras=use_keras)
return g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
diff --git a/research/object_detection/models/ssd_inception_v2_feature_extractor_test.py b/research/object_detection/models/ssd_inception_v2_feature_extractor_tf1_test.py
similarity index 98%
rename from research/object_detection/models/ssd_inception_v2_feature_extractor_test.py
rename to research/object_detection/models/ssd_inception_v2_feature_extractor_tf1_test.py
index 34921609c25849e704c21df5fcaccaf92290ca5e..1e33ed70ed45cef900d9f615cba9a5f196d36e23 100644
--- a/research/object_detection/models/ssd_inception_v2_feature_extractor_test.py
+++ b/research/object_detection/models/ssd_inception_v2_feature_extractor_tf1_test.py
@@ -14,13 +14,16 @@
# ==============================================================================
"""Tests for object_detection.models.ssd_inception_v2_feature_extractor."""
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models import ssd_feature_extractor_test
from object_detection.models import ssd_inception_v2_feature_extractor
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class SsdInceptionV2FeatureExtractorTest(
ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
diff --git a/research/object_detection/models/ssd_inception_v3_feature_extractor_test.py b/research/object_detection/models/ssd_inception_v3_feature_extractor_tf1_test.py
similarity index 98%
rename from research/object_detection/models/ssd_inception_v3_feature_extractor_test.py
rename to research/object_detection/models/ssd_inception_v3_feature_extractor_tf1_test.py
index 1e706c1e8c505588d76f712ad07f0720e97163fb..a0cbb451586b865cc448c292231a21dc468110a4 100644
--- a/research/object_detection/models/ssd_inception_v3_feature_extractor_test.py
+++ b/research/object_detection/models/ssd_inception_v3_feature_extractor_tf1_test.py
@@ -14,13 +14,16 @@
# ==============================================================================
"""Tests for object_detection.models.ssd_inception_v3_feature_extractor."""
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models import ssd_feature_extractor_test
from object_detection.models import ssd_inception_v3_feature_extractor
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class SsdInceptionV3FeatureExtractorTest(
ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
diff --git a/research/object_detection/models/ssd_mobiledet_feature_extractor.py b/research/object_detection/models/ssd_mobiledet_feature_extractor.py
index 33d7e053b62352d3c72bc98a3e82f2028d4e9cee..019d7543bb7b271d6158b6b30fbb69a7db5a99a8 100644
--- a/research/object_detection/models/ssd_mobiledet_feature_extractor.py
+++ b/research/object_detection/models/ssd_mobiledet_feature_extractor.py
@@ -290,6 +290,72 @@ def mobiledet_edgetpu_backbone(h, multiplier=1.0):
return endpoints
+def mobiledet_gpu_backbone(h, multiplier=1.0):
+ """Build a MobileDet GPU backbone."""
+
+ def _scale(filters):
+ return _scale_filters(filters, multiplier)
+
+ ibn = functools.partial(_inverted_bottleneck, activation_fn=tf.nn.relu6)
+ fused = functools.partial(_fused_conv, activation_fn=tf.nn.relu6)
+ tucker = functools.partial(_tucker_conv, activation_fn=tf.nn.relu6)
+
+ endpoints = {}
+ # block 0
+ h = _conv(h, _scale(32), 3, strides=2, activation_fn=tf.nn.relu6)
+
+ # block 1
+ h = tucker(
+ h,
+ _scale(16),
+ input_rank_ratio=0.25,
+ output_rank_ratio=0.25,
+ residual=False)
+ endpoints['C1'] = h
+
+ # block 2
+ h = fused(h, _scale(32), expansion=8, strides=2, residual=False)
+ h = tucker(h, _scale(32), input_rank_ratio=0.25, output_rank_ratio=0.25)
+ h = tucker(h, _scale(32), input_rank_ratio=0.25, output_rank_ratio=0.25)
+ h = tucker(h, _scale(32), input_rank_ratio=0.25, output_rank_ratio=0.25)
+ endpoints['C2'] = h
+
+ # block 3
+ h = fused(
+ h, _scale(64), expansion=8, kernel_size=3, strides=2, residual=False)
+ h = fused(h, _scale(64), expansion=8)
+ h = fused(h, _scale(64), expansion=8)
+ h = fused(h, _scale(64), expansion=4)
+ endpoints['C3'] = h
+
+ # block 4
+ h = fused(
+ h, _scale(128), expansion=8, kernel_size=3, strides=2, residual=False)
+ h = fused(h, _scale(128), expansion=4)
+ h = fused(h, _scale(128), expansion=4)
+ h = fused(h, _scale(128), expansion=4)
+
+ # block 5
+ h = fused(
+ h, _scale(128), expansion=8, kernel_size=3, strides=1, residual=False)
+ h = fused(h, _scale(128), expansion=8)
+ h = fused(h, _scale(128), expansion=8)
+ h = fused(h, _scale(128), expansion=8)
+ endpoints['C4'] = h
+
+ # block 6
+ h = fused(
+ h, _scale(128), expansion=4, kernel_size=3, strides=2, residual=False)
+ h = fused(h, _scale(128), expansion=4)
+ h = fused(h, _scale(128), expansion=4)
+ h = fused(h, _scale(128), expansion=4)
+
+ # block 7
+ h = ibn(h, _scale(384), expansion=8, kernel_size=3, strides=1, residual=False)
+ endpoints['C5'] = h
+ return endpoints
+
+
class SSDMobileDetFeatureExtractorBase(ssd_meta_arch.SSDFeatureExtractor):
"""Base class of SSD feature extractor using MobileDet features."""
@@ -490,3 +556,31 @@ class SSDMobileDetEdgeTPUFeatureExtractor(SSDMobileDetFeatureExtractorBase):
use_depthwise=use_depthwise,
override_base_feature_extractor_hyperparams=override_base_feature_extractor_hyperparams,
scope_name=scope_name)
+
+
+class SSDMobileDetGPUFeatureExtractor(SSDMobileDetFeatureExtractorBase):
+ """MobileDet-GPU feature extractor."""
+
+ def __init__(self,
+ is_training,
+ depth_multiplier,
+ min_depth,
+ pad_to_multiple,
+ conv_hyperparams_fn,
+ reuse_weights=None,
+ use_explicit_padding=False,
+ use_depthwise=False,
+ override_base_feature_extractor_hyperparams=False,
+ scope_name='MobileDetGPU'):
+ super(SSDMobileDetGPUFeatureExtractor, self).__init__(
+ backbone_fn=mobiledet_gpu_backbone,
+ is_training=is_training,
+ depth_multiplier=depth_multiplier,
+ min_depth=min_depth,
+ pad_to_multiple=pad_to_multiple,
+ conv_hyperparams_fn=conv_hyperparams_fn,
+ reuse_weights=reuse_weights,
+ use_explicit_padding=use_explicit_padding,
+ use_depthwise=use_depthwise,
+ override_base_feature_extractor_hyperparams=override_base_feature_extractor_hyperparams,
+ scope_name=scope_name)
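For orientation: the strided convolutions in mobiledet_gpu_backbone above place the exported endpoints at strides 2, 4, 8, 16 and 32 relative to the input. A small plain-Python sketch (not the library API) that reproduces the C4/C5 spatial sizes asserted by the shape test added further below for a 640x320 input:

# Strides implied by the stride-2 layers in blocks 0, 2, 3, 4 and 6 above.
ENDPOINT_STRIDES = {'C1': 2, 'C2': 4, 'C3': 8, 'C4': 16, 'C5': 32}

def endpoint_spatial_size(height, width, endpoint):
  stride = ENDPOINT_STRIDES[endpoint]
  # 'SAME'-style padding rounds up when a dimension does not divide evenly.
  return (-(-height // stride), -(-width // stride))

assert endpoint_spatial_size(640, 320, 'C4') == (40, 20)
assert endpoint_spatial_size(640, 320, 'C5') == (20, 10)
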
diff --git a/research/object_detection/models/ssd_mobiledet_feature_extractor_test.py b/research/object_detection/models/ssd_mobiledet_feature_extractor_tf1_test.py
similarity index 86%
rename from research/object_detection/models/ssd_mobiledet_feature_extractor_test.py
rename to research/object_detection/models/ssd_mobiledet_feature_extractor_tf1_test.py
index c2c1ef6925373d3a36a7166a2df3c11b548519fb..2af37554b55f68e85ddbe7587b86015e10ac65e8 100644
--- a/research/object_detection/models/ssd_mobiledet_feature_extractor_test.py
+++ b/research/object_detection/models/ssd_mobiledet_feature_extractor_tf1_test.py
@@ -13,14 +13,20 @@
# limitations under the License.
# ==============================================================================
"""Tests for ssd_mobiledet_feature_extractor."""
-
+import unittest
import tensorflow.compat.v1 as tf
-from tensorflow.contrib import quantize as contrib_quantize
from object_detection.models import ssd_feature_extractor_test
from object_detection.models import ssd_mobiledet_feature_extractor
+from object_detection.utils import tf_version
+
+try:
+ from tensorflow.contrib import quantize as contrib_quantize # pylint: disable=g-import-not-at-top
+except: # pylint: disable=bare-except
+ pass
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class SSDMobileDetFeatureExtractorTest(
ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
@@ -105,6 +111,19 @@ class SSDMobileDetFeatureExtractorTest(
for expected_shape, x in zip(expected_feature_map_shapes, feature_maps):
self.assertTrue(x.shape.is_compatible_with(expected_shape))
+ def test_mobiledet_gpu_returns_correct_shapes(self):
+ expected_feature_map_shapes = [(2, 40, 20, 128), (2, 20, 10, 384),
+ (2, 10, 5, 512), (2, 5, 3, 256),
+ (2, 3, 2, 256), (2, 2, 1, 128)]
+ feature_extractor = self._create_feature_extractor(
+ ssd_mobiledet_feature_extractor.SSDMobileDetGPUFeatureExtractor)
+ image = tf.random.normal((2, 640, 320, 3))
+ feature_maps = feature_extractor.extract_features(image)
+
+ self.assertEqual(len(expected_feature_map_shapes), len(feature_maps))
+ for expected_shape, x in zip(expected_feature_map_shapes, feature_maps):
+ self.assertTrue(x.shape.is_compatible_with(expected_shape))
+
def _check_quantization(self, model_fn):
checkpoint_dir = self.get_temp_dir()
diff --git a/research/object_detection/models/ssd_mobilenet_edgetpu_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_edgetpu_feature_extractor_tf1_test.py
similarity index 94%
rename from research/object_detection/models/ssd_mobilenet_edgetpu_feature_extractor_test.py
rename to research/object_detection/models/ssd_mobilenet_edgetpu_feature_extractor_tf1_test.py
index 186122028d20c0e65f9c7285d09a19e55115888f..841fe5a148864a0d62b52fd8f6f3e0059670dd57 100644
--- a/research/object_detection/models/ssd_mobilenet_edgetpu_feature_extractor_test.py
+++ b/research/object_detection/models/ssd_mobilenet_edgetpu_feature_extractor_tf1_test.py
@@ -13,13 +13,15 @@
# limitations under the License.
# ==============================================================================
"""Tests for ssd_mobilenet_edgetpu_feature_extractor."""
-
+import unittest
import tensorflow.compat.v1 as tf
from object_detection.models import ssd_mobilenet_edgetpu_feature_extractor
from object_detection.models import ssd_mobilenet_edgetpu_feature_extractor_testbase
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class SsdMobilenetEdgeTPUFeatureExtractorTest(
ssd_mobilenet_edgetpu_feature_extractor_testbase
._SsdMobilenetEdgeTPUFeatureExtractorTestBase):
diff --git a/research/object_detection/models/ssd_mobilenet_v1_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_v1_feature_extractor_tf1_test.py
similarity index 77%
rename from research/object_detection/models/ssd_mobilenet_v1_feature_extractor_test.py
rename to research/object_detection/models/ssd_mobilenet_v1_feature_extractor_tf1_test.py
index eaf8776afaa89c2d729a1de3cf2b65d67f859a19..2f1d4839693c891b550e04cdaff391219c4b8cf1 100644
--- a/research/object_detection/models/ssd_mobilenet_v1_feature_extractor_test.py
+++ b/research/object_detection/models/ssd_mobilenet_v1_feature_extractor_tf1_test.py
@@ -17,20 +17,16 @@
By using parameterized test decorator, this test serves for both Slim-based and
Keras-based Mobilenet V1 feature extractors in SSD.
"""
-from absl.testing import parameterized
-
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models import ssd_feature_extractor_test
from object_detection.models import ssd_mobilenet_v1_feature_extractor
-from object_detection.models import ssd_mobilenet_v1_keras_feature_extractor
+from object_detection.utils import tf_version
-@parameterized.parameters(
- {'use_keras': False},
- {'use_keras': True},
-)
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class SsdMobilenetV1FeatureExtractorTest(
ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
@@ -59,31 +55,17 @@ class SsdMobilenetV1FeatureExtractorTest(
an ssd_meta_arch.SSDFeatureExtractor object.
"""
min_depth = 32
- if use_keras:
- return (ssd_mobilenet_v1_keras_feature_extractor
- .SSDMobileNetV1KerasFeatureExtractor(
- is_training=is_training,
- depth_multiplier=depth_multiplier,
- min_depth=min_depth,
- pad_to_multiple=pad_to_multiple,
- conv_hyperparams=self._build_conv_hyperparams(
- add_batch_norm=False),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- use_explicit_padding=use_explicit_padding,
- num_layers=num_layers,
- name='MobilenetV1'))
- else:
- return ssd_mobilenet_v1_feature_extractor.SSDMobileNetV1FeatureExtractor(
- is_training,
- depth_multiplier,
- min_depth,
- pad_to_multiple,
- self.conv_hyperparams_fn,
- use_explicit_padding=use_explicit_padding,
- num_layers=num_layers)
+ del use_keras
+ return ssd_mobilenet_v1_feature_extractor.SSDMobileNetV1FeatureExtractor(
+ is_training,
+ depth_multiplier,
+ min_depth,
+ pad_to_multiple,
+ self.conv_hyperparams_fn,
+ use_explicit_padding=use_explicit_padding,
+ num_layers=num_layers)
- def test_extract_features_returns_correct_shapes_128(self, use_keras):
+ def test_extract_features_returns_correct_shapes_128(self):
image_height = 128
image_width = 128
depth_multiplier = 1.0
@@ -99,7 +81,7 @@ class SsdMobilenetV1FeatureExtractorTest(
pad_to_multiple,
expected_feature_map_shape,
use_explicit_padding=False,
- use_keras=use_keras)
+ use_keras=False)
self.check_extract_features_returns_correct_shape(
2,
image_height,
@@ -108,9 +90,9 @@ class SsdMobilenetV1FeatureExtractorTest(
pad_to_multiple,
expected_feature_map_shape,
use_explicit_padding=True,
- use_keras=use_keras)
+ use_keras=False)
- def test_extract_features_returns_correct_shapes_299(self, use_keras):
+ def test_extract_features_returns_correct_shapes_299(self):
image_height = 299
image_width = 299
depth_multiplier = 1.0
@@ -126,7 +108,7 @@ class SsdMobilenetV1FeatureExtractorTest(
pad_to_multiple,
expected_feature_map_shape,
use_explicit_padding=False,
- use_keras=use_keras)
+ use_keras=False)
self.check_extract_features_returns_correct_shape(
2,
image_height,
@@ -135,9 +117,9 @@ class SsdMobilenetV1FeatureExtractorTest(
pad_to_multiple,
expected_feature_map_shape,
use_explicit_padding=True,
- use_keras=use_keras)
+ use_keras=False)
- def test_extract_features_with_dynamic_image_shape(self, use_keras):
+ def test_extract_features_with_dynamic_image_shape(self):
image_height = 128
image_width = 128
depth_multiplier = 1.0
@@ -153,7 +135,7 @@ class SsdMobilenetV1FeatureExtractorTest(
pad_to_multiple,
expected_feature_map_shape,
use_explicit_padding=False,
- use_keras=use_keras)
+ use_keras=False)
self.check_extract_features_returns_correct_shape(
2,
image_height,
@@ -162,10 +144,10 @@ class SsdMobilenetV1FeatureExtractorTest(
pad_to_multiple,
expected_feature_map_shape,
use_explicit_padding=True,
- use_keras=use_keras)
+ use_keras=False)
def test_extract_features_returns_correct_shapes_enforcing_min_depth(
- self, use_keras):
+ self):
image_height = 299
image_width = 299
depth_multiplier = 0.5**12
@@ -181,7 +163,7 @@ class SsdMobilenetV1FeatureExtractorTest(
pad_to_multiple,
expected_feature_map_shape,
use_explicit_padding=False,
- use_keras=use_keras)
+ use_keras=False)
self.check_extract_features_returns_correct_shape(
2,
image_height,
@@ -190,10 +172,10 @@ class SsdMobilenetV1FeatureExtractorTest(
pad_to_multiple,
expected_feature_map_shape,
use_explicit_padding=True,
- use_keras=use_keras)
+ use_keras=False)
def test_extract_features_returns_correct_shapes_with_pad_to_multiple(
- self, use_keras):
+ self):
image_height = 299
image_width = 299
depth_multiplier = 1.0
@@ -209,7 +191,7 @@ class SsdMobilenetV1FeatureExtractorTest(
pad_to_multiple,
expected_feature_map_shape,
use_explicit_padding=False,
- use_keras=use_keras)
+ use_keras=False)
self.check_extract_features_returns_correct_shape(
2,
image_height,
@@ -218,10 +200,10 @@ class SsdMobilenetV1FeatureExtractorTest(
pad_to_multiple,
expected_feature_map_shape,
use_explicit_padding=True,
- use_keras=use_keras)
+ use_keras=False)
def test_extract_features_raises_error_with_invalid_image_size(
- self, use_keras):
+ self):
image_height = 32
image_width = 32
depth_multiplier = 1.0
@@ -231,34 +213,34 @@ class SsdMobilenetV1FeatureExtractorTest(
image_width,
depth_multiplier,
pad_to_multiple,
- use_keras=use_keras)
+ use_keras=False)
- def test_preprocess_returns_correct_value_range(self, use_keras):
+ def test_preprocess_returns_correct_value_range(self):
image_height = 128
image_width = 128
depth_multiplier = 1
pad_to_multiple = 1
test_image = np.random.rand(2, image_height, image_width, 3)
feature_extractor = self._create_feature_extractor(
- depth_multiplier, pad_to_multiple, use_keras=use_keras)
+ depth_multiplier, pad_to_multiple, use_keras=False)
preprocessed_image = feature_extractor.preprocess(test_image)
self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0)))
- def test_variables_only_created_in_scope(self, use_keras):
+ def test_variables_only_created_in_scope(self):
depth_multiplier = 1
pad_to_multiple = 1
scope_name = 'MobilenetV1'
self.check_feature_extractor_variables_under_scope(
- depth_multiplier, pad_to_multiple, scope_name, use_keras=use_keras)
+ depth_multiplier, pad_to_multiple, scope_name, use_keras=False)
- def test_variable_count(self, use_keras):
+ def test_variable_count(self):
depth_multiplier = 1
pad_to_multiple = 1
variables = self.get_feature_extractor_variables(
- depth_multiplier, pad_to_multiple, use_keras=use_keras)
+ depth_multiplier, pad_to_multiple, use_keras=False)
self.assertEqual(len(variables), 151)
- def test_has_fused_batchnorm(self, use_keras):
+ def test_has_fused_batchnorm(self):
image_height = 40
image_width = 40
depth_multiplier = 1
@@ -266,17 +248,14 @@ class SsdMobilenetV1FeatureExtractorTest(
image_placeholder = tf.placeholder(tf.float32,
[1, image_height, image_width, 3])
feature_extractor = self._create_feature_extractor(
- depth_multiplier, pad_to_multiple, use_keras=use_keras)
+ depth_multiplier, pad_to_multiple, use_keras=False)
preprocessed_image = feature_extractor.preprocess(image_placeholder)
- if use_keras:
- _ = feature_extractor(preprocessed_image)
- else:
- _ = feature_extractor.extract_features(preprocessed_image)
+ _ = feature_extractor.extract_features(preprocessed_image)
self.assertTrue(
any('FusedBatchNorm' in op.type
for op in tf.get_default_graph().get_operations()))
- def test_extract_features_with_fewer_layers(self, use_keras):
+ def test_extract_features_with_fewer_layers(self):
image_height = 128
image_width = 128
depth_multiplier = 1.0
@@ -286,7 +265,7 @@ class SsdMobilenetV1FeatureExtractorTest(
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=False, num_layers=4,
- use_keras=use_keras)
+ use_keras=False)
if __name__ == '__main__':
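The per-method use_keras parameter dropped above came from the absl parameterization that previously ran both backends out of one file; the TF1/TF2 file split makes it unnecessary. For reference, a minimal sketch of the retired style (assuming absl-py is available):

import unittest
from absl.testing import parameterized

class CombinedBackendTest(parameterized.TestCase):

  @parameterized.parameters({'use_keras': False}, {'use_keras': True})
  def test_runs_for_both_backends(self, use_keras):
    # Each parameter dict yields a separately named test case.
    self.assertIn(use_keras, (False, True))

if __name__ == '__main__':
  unittest.main()
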
diff --git a/research/object_detection/models/ssd_mobilenet_v1_feature_extractor_tf2_test.py b/research/object_detection/models/ssd_mobilenet_v1_feature_extractor_tf2_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..b60537b886909edbc7236f799c51733b8030380a
--- /dev/null
+++ b/research/object_detection/models/ssd_mobilenet_v1_feature_extractor_tf2_test.py
@@ -0,0 +1,248 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Tests for SSD Mobilenet V1 feature extractors.
+
+This file covers the Keras-based Mobilenet V1 feature extractor, which is the
+variant exercised under TF2.
+"""
+import unittest
+import numpy as np
+import tensorflow.compat.v1 as tf
+
+from object_detection.models import ssd_feature_extractor_test
+from object_detection.models import ssd_mobilenet_v1_keras_feature_extractor
+from object_detection.utils import tf_version
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class SsdMobilenetV1FeatureExtractorTest(
+ ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
+
+ def _create_feature_extractor(self,
+ depth_multiplier,
+ pad_to_multiple,
+ use_explicit_padding=False,
+ num_layers=6,
+ is_training=False,
+ use_keras=False):
+ """Constructs a new feature extractor.
+
+ Args:
+ depth_multiplier: float depth multiplier for feature extractor
+ pad_to_multiple: the nearest multiple to zero pad the input height and
+ width dimensions to.
+ use_explicit_padding: Use 'VALID' padding for convolutions, but prepad
+ inputs so that the output dimensions are the same as if 'SAME' padding
+ were used.
+ num_layers: number of SSD layers.
+ is_training: whether the network is in training mode.
+      use_keras: unused argument; this file always builds the Keras-based
+        feature extractor.
+
+ Returns:
+ an ssd_meta_arch.SSDFeatureExtractor object.
+ """
+ del use_keras
+ min_depth = 32
+ return (ssd_mobilenet_v1_keras_feature_extractor
+ .SSDMobileNetV1KerasFeatureExtractor(
+ is_training=is_training,
+ depth_multiplier=depth_multiplier,
+ min_depth=min_depth,
+ pad_to_multiple=pad_to_multiple,
+ conv_hyperparams=self._build_conv_hyperparams(
+ add_batch_norm=False),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ use_explicit_padding=use_explicit_padding,
+ num_layers=num_layers,
+ name='MobilenetV1'))
+
+ def test_extract_features_returns_correct_shapes_128(self):
+ image_height = 128
+ image_width = 128
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 8, 8, 512), (2, 4, 4, 1024),
+ (2, 2, 2, 512), (2, 1, 1, 256),
+ (2, 1, 1, 256), (2, 1, 1, 128)]
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=False,
+ use_keras=True)
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=True,
+ use_keras=True)
+
+ def test_extract_features_returns_correct_shapes_299(self):
+ image_height = 299
+ image_width = 299
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 19, 19, 512), (2, 10, 10, 1024),
+ (2, 5, 5, 512), (2, 3, 3, 256),
+ (2, 2, 2, 256), (2, 1, 1, 128)]
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=False,
+ use_keras=True)
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=True,
+ use_keras=True)
+
+ def test_extract_features_with_dynamic_image_shape(self):
+ image_height = 128
+ image_width = 128
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 8, 8, 512), (2, 4, 4, 1024),
+ (2, 2, 2, 512), (2, 1, 1, 256),
+ (2, 1, 1, 256), (2, 1, 1, 128)]
+ self.check_extract_features_returns_correct_shapes_with_dynamic_inputs(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=False,
+ use_keras=True)
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=True,
+ use_keras=True)
+
+ def test_extract_features_returns_correct_shapes_enforcing_min_depth(
+ self):
+ image_height = 299
+ image_width = 299
+ depth_multiplier = 0.5**12
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 19, 19, 32), (2, 10, 10, 32),
+ (2, 5, 5, 32), (2, 3, 3, 32), (2, 2, 2, 32),
+ (2, 1, 1, 32)]
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=False,
+ use_keras=True)
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=True,
+ use_keras=True)
+
+ def test_extract_features_returns_correct_shapes_with_pad_to_multiple(
+ self):
+ image_height = 299
+ image_width = 299
+ depth_multiplier = 1.0
+ pad_to_multiple = 32
+ expected_feature_map_shape = [(2, 20, 20, 512), (2, 10, 10, 1024),
+ (2, 5, 5, 512), (2, 3, 3, 256),
+ (2, 2, 2, 256), (2, 1, 1, 128)]
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=False,
+ use_keras=True)
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=True,
+ use_keras=True)
+
+ def test_extract_features_raises_error_with_invalid_image_size(
+ self):
+ image_height = 32
+ image_width = 32
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ self.check_extract_features_raises_error_with_invalid_image_size(
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ use_keras=True)
+
+ def test_preprocess_returns_correct_value_range(self):
+ image_height = 128
+ image_width = 128
+ depth_multiplier = 1
+ pad_to_multiple = 1
+ test_image = np.random.rand(2, image_height, image_width, 3)
+ feature_extractor = self._create_feature_extractor(
+ depth_multiplier, pad_to_multiple, use_keras=True)
+ preprocessed_image = feature_extractor.preprocess(test_image)
+ self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0)))
+
+ def test_extract_features_with_fewer_layers(self):
+ image_height = 128
+ image_width = 128
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 8, 8, 512), (2, 4, 4, 1024),
+ (2, 2, 2, 512), (2, 1, 1, 256)]
+ self.check_extract_features_returns_correct_shape(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_explicit_padding=False, num_layers=4,
+ use_keras=True)
+
+
+if __name__ == '__main__':
+ tf.test.main()
diff --git a/research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_tf1_test.py
similarity index 76%
rename from research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_test.py
rename to research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_tf1_test.py
index 131afed8a6b4d37bd99806715f279ba9230c5ad0..449b7803d390f60747f0f4d67d8b98414a7d24eb 100644
--- a/research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_test.py
+++ b/research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_tf1_test.py
@@ -18,19 +18,16 @@
By using parameterized test decorator, this test serves for both Slim-based and
Keras-based Mobilenet V1 FPN feature extractors in SSD.
"""
-from absl.testing import parameterized
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models import ssd_feature_extractor_test
from object_detection.models import ssd_mobilenet_v1_fpn_feature_extractor
-from object_detection.models import ssd_mobilenet_v1_fpn_keras_feature_extractor
+from object_detection.utils import tf_version
-@parameterized.parameters(
- {'use_keras': False},
- {'use_keras': True},
-)
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class SsdMobilenetV1FpnFeatureExtractorTest(
ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
@@ -52,33 +49,19 @@ class SsdMobilenetV1FpnFeatureExtractorTest(
Returns:
an ssd_meta_arch.SSDFeatureExtractor object.
"""
+ del use_keras
min_depth = 32
- if use_keras:
- return (ssd_mobilenet_v1_fpn_keras_feature_extractor.
- SSDMobileNetV1FpnKerasFeatureExtractor(
- is_training=is_training,
- depth_multiplier=depth_multiplier,
- min_depth=min_depth,
- pad_to_multiple=pad_to_multiple,
- conv_hyperparams=self._build_conv_hyperparams(
- add_batch_norm=False),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- use_explicit_padding=use_explicit_padding,
- use_depthwise=True,
- name='MobilenetV1_FPN'))
- else:
- return (ssd_mobilenet_v1_fpn_feature_extractor.
- SSDMobileNetV1FpnFeatureExtractor(
- is_training,
- depth_multiplier,
- min_depth,
- pad_to_multiple,
- self.conv_hyperparams_fn,
- use_depthwise=True,
- use_explicit_padding=use_explicit_padding))
-
- def test_extract_features_returns_correct_shapes_256(self, use_keras):
+ return (ssd_mobilenet_v1_fpn_feature_extractor.
+ SSDMobileNetV1FpnFeatureExtractor(
+ is_training,
+ depth_multiplier,
+ min_depth,
+ pad_to_multiple,
+ self.conv_hyperparams_fn,
+ use_depthwise=True,
+ use_explicit_padding=use_explicit_padding))
+
+ def test_extract_features_returns_correct_shapes_256(self):
image_height = 256
image_width = 256
depth_multiplier = 1.0
@@ -89,13 +72,13 @@ class SsdMobilenetV1FpnFeatureExtractorTest(
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=False,
- use_keras=use_keras)
+ use_keras=False)
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=True,
- use_keras=use_keras)
+ use_keras=False)
- def test_extract_features_returns_correct_shapes_384(self, use_keras):
+ def test_extract_features_returns_correct_shapes_384(self):
image_height = 320
image_width = 320
depth_multiplier = 1.0
@@ -106,13 +89,13 @@ class SsdMobilenetV1FpnFeatureExtractorTest(
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=False,
- use_keras=use_keras)
+ use_keras=False)
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=True,
- use_keras=use_keras)
+ use_keras=False)
- def test_extract_features_with_dynamic_image_shape(self, use_keras):
+ def test_extract_features_with_dynamic_image_shape(self):
image_height = 256
image_width = 256
depth_multiplier = 1.0
@@ -123,14 +106,14 @@ class SsdMobilenetV1FpnFeatureExtractorTest(
self.check_extract_features_returns_correct_shapes_with_dynamic_inputs(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=False,
- use_keras=use_keras)
+ use_keras=False)
self.check_extract_features_returns_correct_shapes_with_dynamic_inputs(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=True,
- use_keras=use_keras)
+ use_keras=False)
def test_extract_features_returns_correct_shapes_with_pad_to_multiple(
- self, use_keras):
+ self):
image_height = 299
image_width = 299
depth_multiplier = 1.0
@@ -141,14 +124,14 @@ class SsdMobilenetV1FpnFeatureExtractorTest(
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=False,
- use_keras=use_keras)
+ use_keras=False)
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=True,
- use_keras=use_keras)
+ use_keras=False)
def test_extract_features_returns_correct_shapes_enforcing_min_depth(
- self, use_keras):
+ self):
image_height = 256
image_width = 256
depth_multiplier = 0.5**12
@@ -159,23 +142,23 @@ class SsdMobilenetV1FpnFeatureExtractorTest(
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=False,
- use_keras=use_keras)
+ use_keras=False)
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
expected_feature_map_shape, use_explicit_padding=True,
- use_keras=use_keras)
+ use_keras=False)
def test_extract_features_raises_error_with_invalid_image_size(
- self, use_keras):
+ self):
image_height = 32
image_width = 32
depth_multiplier = 1.0
pad_to_multiple = 1
self.check_extract_features_raises_error_with_invalid_image_size(
image_height, image_width, depth_multiplier, pad_to_multiple,
- use_keras=use_keras)
+ use_keras=False)
- def test_preprocess_returns_correct_value_range(self, use_keras):
+ def test_preprocess_returns_correct_value_range(self):
image_height = 256
image_width = 256
depth_multiplier = 1
@@ -183,25 +166,25 @@ class SsdMobilenetV1FpnFeatureExtractorTest(
test_image = np.random.rand(2, image_height, image_width, 3)
feature_extractor = self._create_feature_extractor(depth_multiplier,
pad_to_multiple,
- use_keras=use_keras)
+ use_keras=False)
preprocessed_image = feature_extractor.preprocess(test_image)
self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0)))
- def test_variables_only_created_in_scope(self, use_keras):
+ def test_variables_only_created_in_scope(self):
depth_multiplier = 1
pad_to_multiple = 1
scope_name = 'MobilenetV1'
self.check_feature_extractor_variables_under_scope(
- depth_multiplier, pad_to_multiple, scope_name, use_keras=use_keras)
+ depth_multiplier, pad_to_multiple, scope_name, use_keras=False)
- def test_variable_count(self, use_keras):
+ def test_variable_count(self):
depth_multiplier = 1
pad_to_multiple = 1
variables = self.get_feature_extractor_variables(
- depth_multiplier, pad_to_multiple, use_keras=use_keras)
+ depth_multiplier, pad_to_multiple, use_keras=False)
self.assertEqual(len(variables), 153)
- def test_fused_batchnorm(self, use_keras):
+ def test_fused_batchnorm(self):
image_height = 256
image_width = 256
depth_multiplier = 1
@@ -210,12 +193,9 @@ class SsdMobilenetV1FpnFeatureExtractorTest(
[1, image_height, image_width, 3])
feature_extractor = self._create_feature_extractor(depth_multiplier,
pad_to_multiple,
- use_keras=use_keras)
+ use_keras=False)
preprocessed_image = feature_extractor.preprocess(image_placeholder)
- if use_keras:
- _ = feature_extractor(preprocessed_image)
- else:
- _ = feature_extractor.extract_features(preprocessed_image)
+ _ = feature_extractor.extract_features(preprocessed_image)
self.assertTrue(
any('FusedBatchNorm' in op.type
diff --git a/research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_tf2_test.py b/research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_tf2_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..307cfa8b0b5594f921fee670699cc026ec16fbce
--- /dev/null
+++ b/research/object_detection/models/ssd_mobilenet_v1_fpn_feature_extractor_tf2_test.py
@@ -0,0 +1,179 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+"""Tests for ssd_mobilenet_v1_fpn_feature_extractor.
+
+This file covers the Keras-based Mobilenet V1 FPN feature extractor, which is
+the variant exercised under TF2.
+"""
+import unittest
+import numpy as np
+import tensorflow.compat.v1 as tf
+
+from object_detection.models import ssd_feature_extractor_test
+from object_detection.models import ssd_mobilenet_v1_fpn_keras_feature_extractor
+from object_detection.utils import tf_version
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class SsdMobilenetV1FpnFeatureExtractorTest(
+ ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
+
+ def _create_feature_extractor(self, depth_multiplier, pad_to_multiple,
+ is_training=True, use_explicit_padding=False,
+ use_keras=True):
+ """Constructs a new feature extractor.
+
+ Args:
+ depth_multiplier: float depth multiplier for feature extractor
+ pad_to_multiple: the nearest multiple to zero pad the input height and
+ width dimensions to.
+ is_training: whether the network is in training mode.
+ use_explicit_padding: Use 'VALID' padding for convolutions, but prepad
+ inputs so that the output dimensions are the same as if 'SAME' padding
+ were used.
+      use_keras: unused argument; this file always builds the Keras-based
+        feature extractor.
+ Returns:
+ an ssd_meta_arch.SSDFeatureExtractor object.
+ """
+ min_depth = 32
+ del use_keras
+ return (ssd_mobilenet_v1_fpn_keras_feature_extractor.
+ SSDMobileNetV1FpnKerasFeatureExtractor(
+ is_training=is_training,
+ depth_multiplier=depth_multiplier,
+ min_depth=min_depth,
+ pad_to_multiple=pad_to_multiple,
+ conv_hyperparams=self._build_conv_hyperparams(
+ add_batch_norm=False),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ use_explicit_padding=use_explicit_padding,
+ use_depthwise=True,
+ name='MobilenetV1_FPN'))
+
+ def test_extract_features_returns_correct_shapes_256(self):
+ image_height = 256
+ image_width = 256
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 32, 32, 256), (2, 16, 16, 256),
+ (2, 8, 8, 256), (2, 4, 4, 256),
+ (2, 2, 2, 256)]
+ self.check_extract_features_returns_correct_shape(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_explicit_padding=False,
+ use_keras=True)
+ self.check_extract_features_returns_correct_shape(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_explicit_padding=True,
+ use_keras=True)
+
+ def test_extract_features_returns_correct_shapes_384(self):
+ image_height = 320
+ image_width = 320
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 40, 40, 256), (2, 20, 20, 256),
+ (2, 10, 10, 256), (2, 5, 5, 256),
+ (2, 3, 3, 256)]
+ self.check_extract_features_returns_correct_shape(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_explicit_padding=False,
+ use_keras=True)
+ self.check_extract_features_returns_correct_shape(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_explicit_padding=True,
+ use_keras=True)
+
+ def test_extract_features_with_dynamic_image_shape(self):
+ image_height = 256
+ image_width = 256
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 32, 32, 256), (2, 16, 16, 256),
+ (2, 8, 8, 256), (2, 4, 4, 256),
+ (2, 2, 2, 256)]
+ self.check_extract_features_returns_correct_shapes_with_dynamic_inputs(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_explicit_padding=False,
+ use_keras=True)
+ self.check_extract_features_returns_correct_shapes_with_dynamic_inputs(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_explicit_padding=True,
+ use_keras=True)
+
+ def test_extract_features_returns_correct_shapes_with_pad_to_multiple(
+ self):
+ image_height = 299
+ image_width = 299
+ depth_multiplier = 1.0
+ pad_to_multiple = 32
+ expected_feature_map_shape = [(2, 40, 40, 256), (2, 20, 20, 256),
+ (2, 10, 10, 256), (2, 5, 5, 256),
+ (2, 3, 3, 256)]
+ self.check_extract_features_returns_correct_shape(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_explicit_padding=False,
+ use_keras=True)
+ self.check_extract_features_returns_correct_shape(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_explicit_padding=True,
+ use_keras=True)
+
+ def test_extract_features_returns_correct_shapes_enforcing_min_depth(
+ self):
+ image_height = 256
+ image_width = 256
+ depth_multiplier = 0.5**12
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 32, 32, 32), (2, 16, 16, 32),
+ (2, 8, 8, 32), (2, 4, 4, 32),
+ (2, 2, 2, 32)]
+ self.check_extract_features_returns_correct_shape(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_explicit_padding=False,
+ use_keras=True)
+ self.check_extract_features_returns_correct_shape(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_explicit_padding=True,
+ use_keras=True)
+
+ def test_extract_features_raises_error_with_invalid_image_size(
+ self):
+ image_height = 32
+ image_width = 32
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ self.check_extract_features_raises_error_with_invalid_image_size(
+ image_height, image_width, depth_multiplier, pad_to_multiple,
+ use_keras=True)
+
+ def test_preprocess_returns_correct_value_range(self):
+ image_height = 256
+ image_width = 256
+ depth_multiplier = 1
+ pad_to_multiple = 1
+ test_image = np.random.rand(2, image_height, image_width, 3)
+ feature_extractor = self._create_feature_extractor(depth_multiplier,
+ pad_to_multiple,
+ use_keras=True)
+ preprocessed_image = feature_extractor.preprocess(test_image)
+ self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0)))
+
+
+if __name__ == '__main__':
+ tf.test.main()
diff --git a/research/object_detection/models/ssd_mobilenet_v1_fpn_keras_feature_extractor.py b/research/object_detection/models/ssd_mobilenet_v1_fpn_keras_feature_extractor.py
index 53d3fdbd447aa723a560b71461c186906465b25d..7792931875dc122ea938f8c87633e31f4adc4336 100644
--- a/research/object_detection/models/ssd_mobilenet_v1_fpn_keras_feature_extractor.py
+++ b/research/object_detection/models/ssd_mobilenet_v1_fpn_keras_feature_extractor.py
@@ -123,7 +123,7 @@ class SSDMobileNetV1FpnKerasFeatureExtractor(
'Conv2d_3_pointwise', 'Conv2d_5_pointwise', 'Conv2d_11_pointwise',
'Conv2d_13_pointwise'
]
- self._mobilenet_v1 = None
+ self.classification_backbone = None
self._fpn_features_generator = None
self._coarse_feature_layers = []
@@ -147,7 +147,7 @@ class SSDMobileNetV1FpnKerasFeatureExtractor(
name='conv_pw_11_relu').output
conv2d_13_pointwise = full_mobilenet_v1.get_layer(
name='conv_pw_13_relu').output
- self._mobilenet_v1 = tf.keras.Model(
+ self.classification_backbone = tf.keras.Model(
inputs=full_mobilenet_v1.inputs,
outputs=[conv2d_3_pointwise, conv2d_5_pointwise,
conv2d_11_pointwise, conv2d_13_pointwise]
@@ -218,7 +218,7 @@ class SSDMobileNetV1FpnKerasFeatureExtractor(
preprocessed_inputs = shape_utils.check_min_image_dim(
33, preprocessed_inputs)
- image_features = self._mobilenet_v1(
+ image_features = self.classification_backbone(
ops.pad_to_multiple(preprocessed_inputs, self._pad_to_multiple))
feature_block_list = []
@@ -243,3 +243,14 @@ class SSDMobileNetV1FpnKerasFeatureExtractor(
last_feature_map = layer(last_feature_map)
feature_maps.append(last_feature_map)
return feature_maps
+
+ def restore_from_classification_checkpoint_fn(self, feature_extractor_scope):
+ """Returns a map for restoring from an (object-based) checkpoint.
+
+ Args:
+ feature_extractor_scope: A scope name for the feature extractor (unused).
+
+ Returns:
+ A dict mapping keys to Keras models
+ """
+ return {'feature_extractor': self.classification_backbone}
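The new restore_from_classification_checkpoint_fn exposes the backbone under a stable key, which fits TF2 object-based checkpoint restoration. A hedged usage sketch; the detection training code that actually consumes this map is not part of this diff, so the call site below is illustrative only:

import tensorflow as tf  # TF2 API, for tf.train.Checkpoint.

def restore_backbone_weights(feature_extractor, classification_ckpt_path):
  # The scope argument is unused by the Keras extractor; kept for API parity.
  restore_map = feature_extractor.restore_from_classification_checkpoint_fn(
      feature_extractor_scope='unused')
  ckpt = tf.train.Checkpoint(**restore_map)
  # expect_partial(): classification checkpoints also contain variables (e.g.
  # the logits layer) that the detection backbone never uses.
  ckpt.restore(classification_ckpt_path).expect_partial()
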
diff --git a/research/object_detection/models/ssd_mobilenet_v1_keras_feature_extractor.py b/research/object_detection/models/ssd_mobilenet_v1_keras_feature_extractor.py
index 679dc25dbd21039e8d0cbc2f3eeaa2eeac9c56c6..2f0df91540ae3598cde3d08c764b023c3c7f758e 100644
--- a/research/object_detection/models/ssd_mobilenet_v1_keras_feature_extractor.py
+++ b/research/object_detection/models/ssd_mobilenet_v1_keras_feature_extractor.py
@@ -93,7 +93,7 @@ class SSDMobileNetV1KerasFeatureExtractor(
'use_explicit_padding': self._use_explicit_padding,
'use_depthwise': self._use_depthwise,
}
- self._mobilenet_v1 = None
+ self.classification_backbone = None
self._feature_map_generator = None
def build(self, input_shape):
@@ -111,7 +111,7 @@ class SSDMobileNetV1KerasFeatureExtractor(
name='conv_pw_11_relu').output
conv2d_13_pointwise = full_mobilenet_v1.get_layer(
name='conv_pw_13_relu').output
- self._mobilenet_v1 = tf.keras.Model(
+ self.classification_backbone = tf.keras.Model(
inputs=full_mobilenet_v1.inputs,
outputs=[conv2d_11_pointwise, conv2d_13_pointwise])
self._feature_map_generator = (
@@ -155,7 +155,7 @@ class SSDMobileNetV1KerasFeatureExtractor(
preprocessed_inputs = shape_utils.check_min_image_dim(
33, preprocessed_inputs)
- image_features = self._mobilenet_v1(
+ image_features = self.classification_backbone(
ops.pad_to_multiple(preprocessed_inputs, self._pad_to_multiple))
feature_maps = self._feature_map_generator({
diff --git a/research/object_detection/models/ssd_mobilenet_v1_ppn_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_v1_ppn_feature_extractor_tf1_test.py
similarity index 98%
rename from research/object_detection/models/ssd_mobilenet_v1_ppn_feature_extractor_test.py
rename to research/object_detection/models/ssd_mobilenet_v1_ppn_feature_extractor_tf1_test.py
index c5a9cd807128e2e513d0dd8a9d9348921ff0e4d9..b5918c0dfa9a3e3819df14f9d504dd63b8febc63 100644
--- a/research/object_detection/models/ssd_mobilenet_v1_ppn_feature_extractor_test.py
+++ b/research/object_detection/models/ssd_mobilenet_v1_ppn_feature_extractor_tf1_test.py
@@ -14,13 +14,16 @@
# ==============================================================================
"""Tests for ssd_mobilenet_v1_ppn_feature_extractor."""
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models import ssd_feature_extractor_test
from object_detection.models import ssd_mobilenet_v1_ppn_feature_extractor
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class SsdMobilenetV1PpnFeatureExtractorTest(
ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
diff --git a/research/object_detection/models/ssd_mobilenet_v2_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_v2_feature_extractor_tf1_test.py
similarity index 70%
rename from research/object_detection/models/ssd_mobilenet_v2_feature_extractor_test.py
rename to research/object_detection/models/ssd_mobilenet_v2_feature_extractor_tf1_test.py
index 40eee93dbc021da82a788ff097cae580ebdd692b..96f9bc26e120f2f4396968429f474406b67894ca 100644
--- a/research/object_detection/models/ssd_mobilenet_v2_feature_extractor_test.py
+++ b/research/object_detection/models/ssd_mobilenet_v2_feature_extractor_tf1_test.py
@@ -14,20 +14,17 @@
# ==============================================================================
"""Tests for ssd_mobilenet_v2_feature_extractor."""
-from absl.testing import parameterized
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models import ssd_feature_extractor_test
from object_detection.models import ssd_mobilenet_v2_feature_extractor
-from object_detection.models import ssd_mobilenet_v2_keras_feature_extractor
+from object_detection.utils import tf_version
-@parameterized.parameters(
- {'use_keras': False},
- {'use_keras': True},
-)
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class SsdMobilenetV2FeatureExtractorTest(
ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
@@ -35,8 +32,7 @@ class SsdMobilenetV2FeatureExtractorTest(
depth_multiplier,
pad_to_multiple,
use_explicit_padding=False,
- num_layers=6,
- use_keras=False):
+ num_layers=6):
"""Constructs a new feature extractor.
Args:
@@ -47,36 +43,20 @@ class SsdMobilenetV2FeatureExtractorTest(
inputs so that the output dimensions are the same as if 'SAME' padding
were used.
num_layers: number of SSD layers.
- use_keras: if True builds a keras-based feature extractor, if False builds
- a slim-based one.
Returns:
an ssd_meta_arch.SSDFeatureExtractor object.
"""
min_depth = 32
- if use_keras:
- return (ssd_mobilenet_v2_keras_feature_extractor.
- SSDMobileNetV2KerasFeatureExtractor(
- is_training=False,
- depth_multiplier=depth_multiplier,
- min_depth=min_depth,
- pad_to_multiple=pad_to_multiple,
- conv_hyperparams=self._build_conv_hyperparams(),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- use_explicit_padding=use_explicit_padding,
- num_layers=num_layers,
- name='MobilenetV2'))
- else:
- return ssd_mobilenet_v2_feature_extractor.SSDMobileNetV2FeatureExtractor(
- False,
- depth_multiplier,
- min_depth,
- pad_to_multiple,
- self.conv_hyperparams_fn,
- use_explicit_padding=use_explicit_padding,
- num_layers=num_layers)
-
- def test_extract_features_returns_correct_shapes_128(self, use_keras):
+ return ssd_mobilenet_v2_feature_extractor.SSDMobileNetV2FeatureExtractor(
+ False,
+ depth_multiplier,
+ min_depth,
+ pad_to_multiple,
+ self.conv_hyperparams_fn,
+ use_explicit_padding=use_explicit_padding,
+ num_layers=num_layers)
+
+ def test_extract_features_returns_correct_shapes_128(self):
image_height = 128
image_width = 128
depth_multiplier = 1.0
@@ -86,10 +66,10 @@ class SsdMobilenetV2FeatureExtractorTest(
(2, 1, 1, 256), (2, 1, 1, 128)]
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
- expected_feature_map_shape, use_keras=use_keras)
+ expected_feature_map_shape)
def test_extract_features_returns_correct_shapes_128_explicit_padding(
- self, use_keras):
+ self):
image_height = 128
image_width = 128
depth_multiplier = 1.0
@@ -99,11 +79,10 @@ class SsdMobilenetV2FeatureExtractorTest(
(2, 1, 1, 256), (2, 1, 1, 128)]
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
- expected_feature_map_shape, use_explicit_padding=True,
- use_keras=use_keras)
+ expected_feature_map_shape, use_explicit_padding=True)
def test_extract_features_returns_correct_shapes_with_dynamic_inputs(
- self, use_keras):
+ self):
image_height = 128
image_width = 128
depth_multiplier = 1.0
@@ -113,9 +92,9 @@ class SsdMobilenetV2FeatureExtractorTest(
(2, 1, 1, 256), (2, 1, 1, 128)]
self.check_extract_features_returns_correct_shapes_with_dynamic_inputs(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
- expected_feature_map_shape, use_keras=use_keras)
+ expected_feature_map_shape)
- def test_extract_features_returns_correct_shapes_299(self, use_keras):
+ def test_extract_features_returns_correct_shapes_299(self):
image_height = 299
image_width = 299
depth_multiplier = 1.0
@@ -125,10 +104,10 @@ class SsdMobilenetV2FeatureExtractorTest(
(2, 2, 2, 256), (2, 1, 1, 128)]
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
- expected_feature_map_shape, use_keras=use_keras)
+ expected_feature_map_shape)
def test_extract_features_returns_correct_shapes_enforcing_min_depth(
- self, use_keras):
+ self):
image_height = 299
image_width = 299
depth_multiplier = 0.5**12
@@ -138,10 +117,10 @@ class SsdMobilenetV2FeatureExtractorTest(
(2, 2, 2, 32), (2, 1, 1, 32)]
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
- expected_feature_map_shape, use_keras=use_keras)
+ expected_feature_map_shape)
def test_extract_features_returns_correct_shapes_with_pad_to_multiple(
- self, use_keras):
+ self):
image_height = 299
image_width = 299
depth_multiplier = 1.0
@@ -151,45 +130,43 @@ class SsdMobilenetV2FeatureExtractorTest(
(2, 2, 2, 256), (2, 1, 1, 128)]
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
- expected_feature_map_shape, use_keras=use_keras)
+ expected_feature_map_shape)
def test_extract_features_raises_error_with_invalid_image_size(
- self, use_keras):
+ self):
image_height = 32
image_width = 32
depth_multiplier = 1.0
pad_to_multiple = 1
self.check_extract_features_raises_error_with_invalid_image_size(
- image_height, image_width, depth_multiplier, pad_to_multiple,
- use_keras=use_keras)
+ image_height, image_width, depth_multiplier, pad_to_multiple)
- def test_preprocess_returns_correct_value_range(self, use_keras):
+ def test_preprocess_returns_correct_value_range(self):
image_height = 128
image_width = 128
depth_multiplier = 1
pad_to_multiple = 1
test_image = np.random.rand(4, image_height, image_width, 3)
feature_extractor = self._create_feature_extractor(depth_multiplier,
- pad_to_multiple,
- use_keras=use_keras)
+ pad_to_multiple)
preprocessed_image = feature_extractor.preprocess(test_image)
self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0)))
- def test_variables_only_created_in_scope(self, use_keras):
+ def test_variables_only_created_in_scope(self):
depth_multiplier = 1
pad_to_multiple = 1
scope_name = 'MobilenetV2'
self.check_feature_extractor_variables_under_scope(
- depth_multiplier, pad_to_multiple, scope_name, use_keras=use_keras)
+ depth_multiplier, pad_to_multiple, scope_name)
- def test_variable_count(self, use_keras):
+ def test_variable_count(self):
depth_multiplier = 1
pad_to_multiple = 1
variables = self.get_feature_extractor_variables(
- depth_multiplier, pad_to_multiple, use_keras=use_keras)
+ depth_multiplier, pad_to_multiple)
self.assertEqual(len(variables), 292)
- def test_has_fused_batchnorm(self, use_keras):
+ def test_has_fused_batchnorm(self):
image_height = 40
image_width = 40
depth_multiplier = 1
@@ -197,17 +174,13 @@ class SsdMobilenetV2FeatureExtractorTest(
image_placeholder = tf.placeholder(tf.float32,
[1, image_height, image_width, 3])
feature_extractor = self._create_feature_extractor(depth_multiplier,
- pad_to_multiple,
- use_keras=use_keras)
+ pad_to_multiple)
preprocessed_image = feature_extractor.preprocess(image_placeholder)
- if use_keras:
- _ = feature_extractor(preprocessed_image)
- else:
- _ = feature_extractor.extract_features(preprocessed_image)
+ _ = feature_extractor.extract_features(preprocessed_image)
self.assertTrue(any('FusedBatchNorm' in op.type
for op in tf.get_default_graph().get_operations()))
- def test_extract_features_with_fewer_layers(self, use_keras):
+ def test_extract_features_with_fewer_layers(self):
image_height = 128
image_width = 128
depth_multiplier = 1.0
@@ -216,8 +189,7 @@ class SsdMobilenetV2FeatureExtractorTest(
(2, 2, 2, 512), (2, 1, 1, 256)]
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
- expected_feature_map_shape, use_explicit_padding=False, num_layers=4,
- use_keras=use_keras)
+ expected_feature_map_shape, use_explicit_padding=False, num_layers=4)
if __name__ == '__main__':
diff --git a/research/object_detection/models/ssd_mobilenet_v2_feature_extractor_tf2_test.py b/research/object_detection/models/ssd_mobilenet_v2_feature_extractor_tf2_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..6d4cb5afcf7c978cc24e01d5806914c618cd7fd7
--- /dev/null
+++ b/research/object_detection/models/ssd_mobilenet_v2_feature_extractor_tf2_test.py
@@ -0,0 +1,192 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+"""Tests for ssd_mobilenet_v2_feature_extractor."""
+import unittest
+
+import numpy as np
+import tensorflow.compat.v1 as tf
+
+from object_detection.models import ssd_feature_extractor_test
+from object_detection.models import ssd_mobilenet_v2_keras_feature_extractor
+from object_detection.utils import tf_version
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class SsdMobilenetV2FeatureExtractorTest(
+ ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
+
+ def _create_feature_extractor(self,
+ depth_multiplier,
+ pad_to_multiple,
+ use_explicit_padding=False,
+ num_layers=6,
+ use_keras=False):
+ """Constructs a new feature extractor.
+
+ Args:
+ depth_multiplier: float depth multiplier for feature extractor
+ pad_to_multiple: the nearest multiple to zero pad the input height and
+ width dimensions to.
+ use_explicit_padding: use 'VALID' padding for convolutions, but prepad
+ inputs so that the output dimensions are the same as if 'SAME' padding
+ were used.
+ num_layers: number of SSD layers.
+ use_keras: unused argument.
+
+ Returns:
+ an ssd_meta_arch.SSDFeatureExtractor object.
+ """
+ del use_keras
+ min_depth = 32
+ return (ssd_mobilenet_v2_keras_feature_extractor.
+ SSDMobileNetV2KerasFeatureExtractor(
+ is_training=False,
+ depth_multiplier=depth_multiplier,
+ min_depth=min_depth,
+ pad_to_multiple=pad_to_multiple,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ use_explicit_padding=use_explicit_padding,
+ num_layers=num_layers,
+ name='MobilenetV2'))
+
+ def test_extract_features_returns_correct_shapes_128(self):
+ image_height = 128
+ image_width = 128
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 8, 8, 576), (2, 4, 4, 1280),
+ (2, 2, 2, 512), (2, 1, 1, 256),
+ (2, 1, 1, 256), (2, 1, 1, 128)]
+ self.check_extract_features_returns_correct_shape(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_keras=True)
+
+ def test_extract_features_returns_correct_shapes_128_explicit_padding(
+ self):
+ image_height = 128
+ image_width = 128
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 8, 8, 576), (2, 4, 4, 1280),
+ (2, 2, 2, 512), (2, 1, 1, 256),
+ (2, 1, 1, 256), (2, 1, 1, 128)]
+ self.check_extract_features_returns_correct_shape(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_explicit_padding=True, use_keras=True)
+
+ def test_extract_features_returns_correct_shapes_with_dynamic_inputs(
+ self):
+ image_height = 128
+ image_width = 128
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 8, 8, 576), (2, 4, 4, 1280),
+ (2, 2, 2, 512), (2, 1, 1, 256),
+ (2, 1, 1, 256), (2, 1, 1, 128)]
+ self.check_extract_features_returns_correct_shapes_with_dynamic_inputs(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_keras=True)
+
+ def test_extract_features_returns_correct_shapes_299(self):
+ image_height = 299
+ image_width = 299
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 19, 19, 576), (2, 10, 10, 1280),
+ (2, 5, 5, 512), (2, 3, 3, 256),
+ (2, 2, 2, 256), (2, 1, 1, 128)]
+ self.check_extract_features_returns_correct_shape(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_keras=True)
+
+ def test_extract_features_returns_correct_shapes_enforcing_min_depth(
+ self):
+ image_height = 299
+ image_width = 299
+ depth_multiplier = 0.5**12
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 19, 19, 192), (2, 10, 10, 32),
+ (2, 5, 5, 32), (2, 3, 3, 32),
+ (2, 2, 2, 32), (2, 1, 1, 32)]
+ self.check_extract_features_returns_correct_shape(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_keras=True)
+
+ def test_extract_features_returns_correct_shapes_with_pad_to_multiple(
+ self):
+ image_height = 299
+ image_width = 299
+ depth_multiplier = 1.0
+ pad_to_multiple = 32
+ expected_feature_map_shape = [(2, 20, 20, 576), (2, 10, 10, 1280),
+ (2, 5, 5, 512), (2, 3, 3, 256),
+ (2, 2, 2, 256), (2, 1, 1, 128)]
+ self.check_extract_features_returns_correct_shape(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_keras=True)
+
+ def test_extract_features_raises_error_with_invalid_image_size(
+ self):
+ image_height = 32
+ image_width = 32
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ self.check_extract_features_raises_error_with_invalid_image_size(
+ image_height, image_width, depth_multiplier, pad_to_multiple,
+ use_keras=True)
+
+ def test_preprocess_returns_correct_value_range(self):
+ image_height = 128
+ image_width = 128
+ depth_multiplier = 1
+ pad_to_multiple = 1
+ test_image = np.random.rand(4, image_height, image_width, 3)
+ feature_extractor = self._create_feature_extractor(depth_multiplier,
+ pad_to_multiple)
+ preprocessed_image = feature_extractor.preprocess(test_image)
+ self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0)))
+
+ def test_variables_only_created_in_scope(self):
+ depth_multiplier = 1
+ pad_to_multiple = 1
+ scope_name = 'MobilenetV2'
+ self.check_feature_extractor_variables_under_scope(
+ depth_multiplier, pad_to_multiple, scope_name, use_keras=True)
+
+ def test_variable_count(self):
+ depth_multiplier = 1
+ pad_to_multiple = 1
+ variables = self.get_feature_extractor_variables(
+ depth_multiplier, pad_to_multiple, use_keras=True)
+ self.assertEqual(len(variables), 292)
+
+ def test_extract_features_with_fewer_layers(self):
+ image_height = 128
+ image_width = 128
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 8, 8, 576), (2, 4, 4, 1280),
+ (2, 2, 2, 512), (2, 1, 1, 256)]
+ self.check_extract_features_returns_correct_shape(
+ 2, image_height, image_width, depth_multiplier, pad_to_multiple,
+ expected_feature_map_shape, use_explicit_padding=False, num_layers=4,
+ use_keras=True)
+
+
+if __name__ == '__main__':
+ tf.test.main()
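The new *_tf2_test.py files above gate themselves on the installed TensorFlow major version through object_detection.utils.tf_version. A minimal sketch of that gating pattern, assuming the helpers simply inspect tf.version.VERSION (the real module may do more), with a hypothetical test class:

import unittest

import tensorflow as tf


def is_tf1():
  """True when the installed TensorFlow is a 1.x release (assumed heuristic)."""
  return tf.version.VERSION.startswith('1')


def is_tf2():
  """True when the installed TensorFlow is a 2.x release (assumed heuristic)."""
  return tf.version.VERSION.startswith('2')


@unittest.skipIf(is_tf1(), 'Skipping TF2.X only test.')
class KerasOnlyTest(unittest.TestCase):
  """Hypothetical test that only runs when a TF2 runtime is available."""

  def test_runs_only_under_tf2(self):
    # Keras-based extractors rely on eager execution, the TF2 default.
    self.assertTrue(tf.executing_eagerly())


if __name__ == '__main__':
  unittest.main()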
diff --git a/research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_tf1_test.py
similarity index 70%
rename from research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_test.py
rename to research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_tf1_test.py
index f5bb42b68cf9afe0af64ebce81f4f4e12f48e277..9cdbed5fbe160baefb0afd41477748b9374e191f 100644
--- a/research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_test.py
+++ b/research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_tf1_test.py
@@ -18,31 +18,23 @@
By using parameterized test decorator, this test serves for both Slim-based and
Keras-based Mobilenet V2 FPN feature extractors in SSD.
"""
+import unittest
from absl.testing import parameterized
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models import ssd_feature_extractor_test
from object_detection.models import ssd_mobilenet_v2_fpn_feature_extractor
-from object_detection.models import ssd_mobilenet_v2_fpn_keras_feature_extractor
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
@parameterized.parameters(
{
- 'use_depthwise': False,
- 'use_keras': True
+ 'use_depthwise': False
},
{
- 'use_depthwise': True,
- 'use_keras': True
- },
- {
- 'use_depthwise': False,
- 'use_keras': False
- },
- {
- 'use_depthwise': True,
- 'use_keras': False
+ 'use_depthwise': True
},
)
class SsdMobilenetV2FpnFeatureExtractorTest(
@@ -71,34 +63,20 @@ class SsdMobilenetV2FpnFeatureExtractorTest(
Returns:
an ssd_meta_arch.SSDFeatureExtractor object.
"""
+ del use_keras
min_depth = 32
- if use_keras:
- return (ssd_mobilenet_v2_fpn_keras_feature_extractor
- .SSDMobileNetV2FpnKerasFeatureExtractor(
- is_training=is_training,
- depth_multiplier=depth_multiplier,
- min_depth=min_depth,
- pad_to_multiple=pad_to_multiple,
- conv_hyperparams=self._build_conv_hyperparams(
- add_batch_norm=False),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- use_explicit_padding=use_explicit_padding,
- use_depthwise=use_depthwise,
- name='MobilenetV2_FPN'))
- else:
- return (ssd_mobilenet_v2_fpn_feature_extractor
- .SSDMobileNetV2FpnFeatureExtractor(
- is_training,
- depth_multiplier,
- min_depth,
- pad_to_multiple,
- self.conv_hyperparams_fn,
- use_depthwise=use_depthwise,
- use_explicit_padding=use_explicit_padding))
+ return (ssd_mobilenet_v2_fpn_feature_extractor
+ .SSDMobileNetV2FpnFeatureExtractor(
+ is_training,
+ depth_multiplier,
+ min_depth,
+ pad_to_multiple,
+ self.conv_hyperparams_fn,
+ use_depthwise=use_depthwise,
+ use_explicit_padding=use_explicit_padding))
- def test_extract_features_returns_correct_shapes_256(self, use_keras,
- use_depthwise):
+ def test_extract_features_returns_correct_shapes_256(self, use_depthwise):
+ use_keras = False
image_height = 256
image_width = 256
depth_multiplier = 1.0
@@ -127,8 +105,8 @@ class SsdMobilenetV2FpnFeatureExtractorTest(
use_keras=use_keras,
use_depthwise=use_depthwise)
- def test_extract_features_returns_correct_shapes_384(self, use_keras,
- use_depthwise):
+ def test_extract_features_returns_correct_shapes_384(self, use_depthwise):
+ use_keras = False
image_height = 320
image_width = 320
depth_multiplier = 1.0
@@ -157,8 +135,9 @@ class SsdMobilenetV2FpnFeatureExtractorTest(
use_keras=use_keras,
use_depthwise=use_depthwise)
- def test_extract_features_with_dynamic_image_shape(self, use_keras,
+ def test_extract_features_with_dynamic_image_shape(self,
use_depthwise):
+ use_keras = False
image_height = 256
image_width = 256
depth_multiplier = 1.0
@@ -188,7 +167,8 @@ class SsdMobilenetV2FpnFeatureExtractorTest(
use_depthwise=use_depthwise)
def test_extract_features_returns_correct_shapes_with_pad_to_multiple(
- self, use_keras, use_depthwise):
+ self, use_depthwise):
+ use_keras = False
image_height = 299
image_width = 299
depth_multiplier = 1.0
@@ -218,7 +198,8 @@ class SsdMobilenetV2FpnFeatureExtractorTest(
use_depthwise=use_depthwise)
def test_extract_features_returns_correct_shapes_enforcing_min_depth(
- self, use_keras, use_depthwise):
+ self, use_depthwise):
+ use_keras = False
image_height = 256
image_width = 256
depth_multiplier = 0.5**12
@@ -248,7 +229,8 @@ class SsdMobilenetV2FpnFeatureExtractorTest(
use_depthwise=use_depthwise)
def test_extract_features_raises_error_with_invalid_image_size(
- self, use_keras, use_depthwise):
+ self, use_depthwise):
+ use_keras = False
image_height = 32
image_width = 32
depth_multiplier = 1.0
@@ -261,8 +243,9 @@ class SsdMobilenetV2FpnFeatureExtractorTest(
use_keras=use_keras,
use_depthwise=use_depthwise)
- def test_preprocess_returns_correct_value_range(self, use_keras,
+ def test_preprocess_returns_correct_value_range(self,
use_depthwise):
+ use_keras = False
image_height = 256
image_width = 256
depth_multiplier = 1
@@ -276,7 +259,8 @@ class SsdMobilenetV2FpnFeatureExtractorTest(
preprocessed_image = feature_extractor.preprocess(test_image)
self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0)))
- def test_variables_only_created_in_scope(self, use_keras, use_depthwise):
+ def test_variables_only_created_in_scope(self, use_depthwise):
+ use_keras = False
depth_multiplier = 1
pad_to_multiple = 1
scope_name = 'MobilenetV2'
@@ -287,7 +271,8 @@ class SsdMobilenetV2FpnFeatureExtractorTest(
use_keras=use_keras,
use_depthwise=use_depthwise)
- def test_fused_batchnorm(self, use_keras, use_depthwise):
+ def test_fused_batchnorm(self, use_depthwise):
+ use_keras = False
image_height = 256
image_width = 256
depth_multiplier = 1
@@ -300,15 +285,13 @@ class SsdMobilenetV2FpnFeatureExtractorTest(
use_keras=use_keras,
use_depthwise=use_depthwise)
preprocessed_image = feature_extractor.preprocess(image_placeholder)
- if use_keras:
- _ = feature_extractor(preprocessed_image)
- else:
- _ = feature_extractor.extract_features(preprocessed_image)
+ _ = feature_extractor.extract_features(preprocessed_image)
self.assertTrue(
any('FusedBatchNorm' in op.type
for op in tf.get_default_graph().get_operations()))
- def test_variable_count(self, use_keras, use_depthwise):
+ def test_variable_count(self, use_depthwise):
+ use_keras = False
depth_multiplier = 1
pad_to_multiple = 1
variables = self.get_feature_extractor_variables(
@@ -321,8 +304,9 @@ class SsdMobilenetV2FpnFeatureExtractorTest(
expected_variables_len = 278
self.assertEqual(len(variables), expected_variables_len)
- def test_get_expected_feature_map_variable_names(self, use_keras,
+ def test_get_expected_feature_map_variable_names(self,
use_depthwise):
+ use_keras = False
depth_multiplier = 1.0
pad_to_multiple = 1
@@ -360,44 +344,6 @@ class SsdMobilenetV2FpnFeatureExtractorTest(
'MobilenetV2/fpn/projection_2/weights',
'MobilenetV2/fpn/projection_3/weights',
])
- keras_expected_feature_maps_variables = set([
- # Keras Mobilenet V2 feature maps
- 'MobilenetV2_FPN/block_4_depthwise/depthwise_kernel',
- 'MobilenetV2_FPN/block_7_depthwise/depthwise_kernel',
- 'MobilenetV2_FPN/block_14_depthwise/depthwise_kernel',
- 'MobilenetV2_FPN/Conv_1/kernel',
- # FPN layers
- 'MobilenetV2_FPN/bottom_up_Conv2d_20_conv/kernel',
- 'MobilenetV2_FPN/bottom_up_Conv2d_21_conv/kernel',
- 'MobilenetV2_FPN/FeatureMaps/top_down/smoothing_1_conv/kernel',
- 'MobilenetV2_FPN/FeatureMaps/top_down/smoothing_2_conv/kernel',
- 'MobilenetV2_FPN/FeatureMaps/top_down/projection_1/kernel',
- 'MobilenetV2_FPN/FeatureMaps/top_down/projection_2/kernel',
- 'MobilenetV2_FPN/FeatureMaps/top_down/projection_3/kernel'
- ])
- keras_expected_feature_maps_variables_with_depthwise = set([
- # Keras Mobilenet V2 feature maps
- 'MobilenetV2_FPN/block_4_depthwise/depthwise_kernel',
- 'MobilenetV2_FPN/block_7_depthwise/depthwise_kernel',
- 'MobilenetV2_FPN/block_14_depthwise/depthwise_kernel',
- 'MobilenetV2_FPN/Conv_1/kernel',
- # FPN layers
- 'MobilenetV2_FPN/bottom_up_Conv2d_20_depthwise_conv/depthwise_kernel',
- 'MobilenetV2_FPN/bottom_up_Conv2d_20_depthwise_conv/pointwise_kernel',
- 'MobilenetV2_FPN/bottom_up_Conv2d_21_depthwise_conv/depthwise_kernel',
- 'MobilenetV2_FPN/bottom_up_Conv2d_21_depthwise_conv/pointwise_kernel',
- ('MobilenetV2_FPN/FeatureMaps/top_down/smoothing_1_depthwise_conv/'
- 'depthwise_kernel'),
- ('MobilenetV2_FPN/FeatureMaps/top_down/smoothing_1_depthwise_conv/'
- 'pointwise_kernel'),
- ('MobilenetV2_FPN/FeatureMaps/top_down/smoothing_2_depthwise_conv/'
- 'depthwise_kernel'),
- ('MobilenetV2_FPN/FeatureMaps/top_down/smoothing_2_depthwise_conv/'
- 'pointwise_kernel'),
- 'MobilenetV2_FPN/FeatureMaps/top_down/projection_1/kernel',
- 'MobilenetV2_FPN/FeatureMaps/top_down/projection_2/kernel',
- 'MobilenetV2_FPN/FeatureMaps/top_down/projection_3/kernel'
- ])
g = tf.Graph()
with g.as_default():
@@ -407,18 +353,12 @@ class SsdMobilenetV2FpnFeatureExtractorTest(
pad_to_multiple,
use_keras=use_keras,
use_depthwise=use_depthwise)
- if use_keras:
- _ = feature_extractor(preprocessed_inputs)
- expected_feature_maps_variables = keras_expected_feature_maps_variables
- if use_depthwise:
- expected_feature_maps_variables = (
- keras_expected_feature_maps_variables_with_depthwise)
- else:
- _ = feature_extractor.extract_features(preprocessed_inputs)
- expected_feature_maps_variables = slim_expected_feature_maps_variables
- if use_depthwise:
- expected_feature_maps_variables = (
- slim_expected_feature_maps_variables_with_depthwise)
+
+ _ = feature_extractor.extract_features(preprocessed_inputs)
+ expected_feature_maps_variables = slim_expected_feature_maps_variables
+ if use_depthwise:
+ expected_feature_maps_variables = (
+ slim_expected_feature_maps_variables_with_depthwise)
actual_variable_set = set([
var.op.name for var in g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
])
diff --git a/research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_tf2_test.py b/research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_tf2_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..44522ac94494430cb109e084689cc6a1a1dbeddb
--- /dev/null
+++ b/research/object_detection/models/ssd_mobilenet_v2_fpn_feature_extractor_tf2_test.py
@@ -0,0 +1,269 @@
+# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+"""Tests for ssd_mobilenet_v2_fpn_feature_extractor.
+
+By using parameterized test decorator, this test serves for both Slim-based and
+Keras-based Mobilenet V2 FPN feature extractors in SSD.
+"""
+import unittest
+from absl.testing import parameterized
+import numpy as np
+import tensorflow.compat.v1 as tf
+
+from object_detection.models import ssd_feature_extractor_test
+from object_detection.models import ssd_mobilenet_v2_fpn_keras_feature_extractor
+from object_detection.utils import tf_version
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+@parameterized.parameters(
+ {
+ 'use_depthwise': False,
+ },
+ {
+ 'use_depthwise': True,
+ },
+)
+class SsdMobilenetV2FpnFeatureExtractorTest(
+ ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
+
+ def _create_feature_extractor(self,
+ depth_multiplier,
+ pad_to_multiple,
+ is_training=True,
+ use_explicit_padding=False,
+ use_keras=False,
+ use_depthwise=False):
+ """Constructs a new feature extractor.
+
+ Args:
+ depth_multiplier: float depth multiplier for feature extractor
+ pad_to_multiple: the nearest multiple to zero pad the input height and
+ width dimensions to.
+ is_training: whether the network is in training mode.
+ use_explicit_padding: Use 'VALID' padding for convolutions, but prepad
+ inputs so that the output dimensions are the same as if 'SAME' padding
+ were used.
+      use_keras: unused argument; this TF2 test always builds the Keras-based
+        feature extractor.
+ use_depthwise: Whether to use depthwise convolutions.
+ Returns:
+ an ssd_meta_arch.SSDFeatureExtractor object.
+ """
+ del use_keras
+ min_depth = 32
+ return (ssd_mobilenet_v2_fpn_keras_feature_extractor
+ .SSDMobileNetV2FpnKerasFeatureExtractor(
+ is_training=is_training,
+ depth_multiplier=depth_multiplier,
+ min_depth=min_depth,
+ pad_to_multiple=pad_to_multiple,
+ conv_hyperparams=self._build_conv_hyperparams(
+ add_batch_norm=False),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ use_explicit_padding=use_explicit_padding,
+ use_depthwise=use_depthwise,
+ name='MobilenetV2_FPN'))
+
+ def test_extract_features_returns_correct_shapes_256(self,
+ use_depthwise):
+ use_keras = True
+ image_height = 256
+ image_width = 256
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 32, 32, 256), (2, 16, 16, 256),
+ (2, 8, 8, 256), (2, 4, 4, 256),
+ (2, 2, 2, 256)]
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=False,
+ use_keras=use_keras,
+ use_depthwise=use_depthwise)
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=True,
+ use_keras=use_keras,
+ use_depthwise=use_depthwise)
+
+ def test_extract_features_returns_correct_shapes_384(self,
+ use_depthwise):
+ use_keras = True
+ image_height = 320
+ image_width = 320
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 40, 40, 256), (2, 20, 20, 256),
+ (2, 10, 10, 256), (2, 5, 5, 256),
+ (2, 3, 3, 256)]
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=False,
+ use_keras=use_keras,
+ use_depthwise=use_depthwise)
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=True,
+ use_keras=use_keras,
+ use_depthwise=use_depthwise)
+
+ def test_extract_features_with_dynamic_image_shape(self,
+ use_depthwise):
+ use_keras = True
+ image_height = 256
+ image_width = 256
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 32, 32, 256), (2, 16, 16, 256),
+ (2, 8, 8, 256), (2, 4, 4, 256),
+ (2, 2, 2, 256)]
+ self.check_extract_features_returns_correct_shapes_with_dynamic_inputs(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=False,
+ use_keras=use_keras,
+ use_depthwise=use_depthwise)
+ self.check_extract_features_returns_correct_shapes_with_dynamic_inputs(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=True,
+ use_keras=use_keras,
+ use_depthwise=use_depthwise)
+
+ def test_extract_features_returns_correct_shapes_with_pad_to_multiple(
+ self, use_depthwise):
+ use_keras = True
+ image_height = 299
+ image_width = 299
+ depth_multiplier = 1.0
+ pad_to_multiple = 32
+ expected_feature_map_shape = [(2, 40, 40, 256), (2, 20, 20, 256),
+ (2, 10, 10, 256), (2, 5, 5, 256),
+ (2, 3, 3, 256)]
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=False,
+ use_keras=use_keras,
+ use_depthwise=use_depthwise)
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=True,
+ use_keras=use_keras,
+ use_depthwise=use_depthwise)
+
+ def test_extract_features_returns_correct_shapes_enforcing_min_depth(
+ self, use_depthwise):
+ use_keras = True
+ image_height = 256
+ image_width = 256
+ depth_multiplier = 0.5**12
+ pad_to_multiple = 1
+ expected_feature_map_shape = [(2, 32, 32, 32), (2, 16, 16, 32),
+ (2, 8, 8, 32), (2, 4, 4, 32),
+ (2, 2, 2, 32)]
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=False,
+ use_keras=use_keras,
+ use_depthwise=use_depthwise)
+ self.check_extract_features_returns_correct_shape(
+ 2,
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ expected_feature_map_shape,
+ use_explicit_padding=True,
+ use_keras=use_keras,
+ use_depthwise=use_depthwise)
+
+ def test_extract_features_raises_error_with_invalid_image_size(
+ self, use_depthwise=False):
+ use_keras = True
+ image_height = 32
+ image_width = 32
+ depth_multiplier = 1.0
+ pad_to_multiple = 1
+ self.check_extract_features_raises_error_with_invalid_image_size(
+ image_height,
+ image_width,
+ depth_multiplier,
+ pad_to_multiple,
+ use_keras=use_keras,
+ use_depthwise=use_depthwise)
+
+ def test_preprocess_returns_correct_value_range(self,
+ use_depthwise):
+ use_keras = True
+ image_height = 256
+ image_width = 256
+ depth_multiplier = 1
+ pad_to_multiple = 1
+ test_image = np.random.rand(2, image_height, image_width, 3)
+ feature_extractor = self._create_feature_extractor(
+ depth_multiplier,
+ pad_to_multiple,
+ use_keras=use_keras,
+ use_depthwise=use_depthwise)
+ preprocessed_image = feature_extractor.preprocess(test_image)
+ self.assertTrue(np.all(np.less_equal(np.abs(preprocessed_image), 1.0)))
+
+if __name__ == '__main__':
+ tf.test.main()
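The class-level parameterized.parameters decorator above fans the use_depthwise flag out across every test method. A toy, self-contained illustration of that expansion (class and assertion names are made up):

from absl.testing import absltest
from absl.testing import parameterized


@parameterized.parameters(
    {'use_depthwise': False},
    {'use_depthwise': True},
)
class DepthwiseFlagTest(parameterized.TestCase):

  def test_flag_is_boolean(self, use_depthwise):
    # Each parameter dict generates one copy of every test method and passes
    # its entries to the method as keyword arguments.
    self.assertIn(use_depthwise, (False, True))


if __name__ == '__main__':
  absltest.main()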
diff --git a/research/object_detection/models/ssd_mobilenet_v2_fpn_keras_feature_extractor.py b/research/object_detection/models/ssd_mobilenet_v2_fpn_keras_feature_extractor.py
index f01bec9c5b53026113dc74324a739eb13fa48d3d..0834ea6b9db2d853b06392b48b594a7c9a5f301b 100644
--- a/research/object_detection/models/ssd_mobilenet_v2_fpn_keras_feature_extractor.py
+++ b/research/object_detection/models/ssd_mobilenet_v2_fpn_keras_feature_extractor.py
@@ -123,7 +123,7 @@ class SSDMobileNetV2FpnKerasFeatureExtractor(
self._conv_defs = _create_modified_mobilenet_config()
self._use_native_resize_op = use_native_resize_op
self._feature_blocks = ['layer_4', 'layer_7', 'layer_14', 'layer_19']
- self._mobilenet_v2 = None
+ self.classification_backbone = None
self._fpn_features_generator = None
self._coarse_feature_layers = []
@@ -147,7 +147,7 @@ class SSDMobileNetV2FpnKerasFeatureExtractor(
outputs.append(full_mobilenet_v2.get_layer(output_layer_name).output)
layer_19 = full_mobilenet_v2.get_layer(name='out_relu').output
outputs.append(layer_19)
- self._mobilenet_v2 = tf.keras.Model(
+ self.classification_backbone = tf.keras.Model(
inputs=full_mobilenet_v2.inputs,
outputs=outputs)
# pylint:disable=g-long-lambda
@@ -216,7 +216,7 @@ class SSDMobileNetV2FpnKerasFeatureExtractor(
preprocessed_inputs = shape_utils.check_min_image_dim(
33, preprocessed_inputs)
- image_features = self._mobilenet_v2(
+ image_features = self.classification_backbone(
ops.pad_to_multiple(preprocessed_inputs, self._pad_to_multiple))
feature_block_list = []
diff --git a/research/object_detection/models/ssd_mobilenet_v2_keras_feature_extractor.py b/research/object_detection/models/ssd_mobilenet_v2_keras_feature_extractor.py
index e9260cd7af7fb251e8da191cee5dd984a19aec31..0f79fc271d55edbc0e61384948bd816fa6f9cd3b 100644
--- a/research/object_detection/models/ssd_mobilenet_v2_keras_feature_extractor.py
+++ b/research/object_detection/models/ssd_mobilenet_v2_keras_feature_extractor.py
@@ -97,7 +97,7 @@ class SSDMobileNetV2KerasFeatureExtractor(
'use_explicit_padding': self._use_explicit_padding,
}
- self.mobilenet_v2 = None
+ self.classification_backbone = None
self.feature_map_generator = None
def build(self, input_shape):
@@ -114,7 +114,7 @@ class SSDMobileNetV2KerasFeatureExtractor(
conv2d_11_pointwise = full_mobilenet_v2.get_layer(
name='block_13_expand_relu').output
conv2d_13_pointwise = full_mobilenet_v2.get_layer(name='out_relu').output
- self.mobilenet_v2 = tf.keras.Model(
+ self.classification_backbone = tf.keras.Model(
inputs=full_mobilenet_v2.inputs,
outputs=[conv2d_11_pointwise, conv2d_13_pointwise])
self.feature_map_generator = (
@@ -158,7 +158,7 @@ class SSDMobileNetV2KerasFeatureExtractor(
preprocessed_inputs = shape_utils.check_min_image_dim(
33, preprocessed_inputs)
- image_features = self.mobilenet_v2(
+ image_features = self.classification_backbone(
ops.pad_to_multiple(preprocessed_inputs, self._pad_to_multiple))
feature_maps = self.feature_map_generator({
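Renaming the private MobileNet handle to a public classification_backbone attribute lets TF2 object-based checkpoints address the trunk directly. A minimal sketch of that idiom, using a stand-in Keras model and a hypothetical checkpoint path rather than the real extractor:

import tensorflow as tf


class ToyFeatureExtractor(tf.keras.Model):
  """Stand-in for an SSD feature extractor exposing its backbone publicly."""

  def __init__(self):
    super(ToyFeatureExtractor, self).__init__()
    # Mirrors the renamed attribute in the extractors above.
    self.classification_backbone = tf.keras.Sequential(
        [tf.keras.layers.Conv2D(8, 3, padding='same', input_shape=(32, 32, 3))])

  def call(self, inputs):
    return self.classification_backbone(inputs)


extractor = ToyFeatureExtractor()
extractor(tf.zeros([1, 32, 32, 3]))  # Build variables before checkpointing.

# Only the backbone needs to be wired into the checkpoint to save or restore
# classification weights; detection heads can be checkpointed separately.
ckpt = tf.train.Checkpoint(
    classification_backbone=extractor.classification_backbone)
save_path = ckpt.save('/tmp/toy_backbone_ckpt')  # Hypothetical path.
ckpt.restore(save_path)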
diff --git a/research/object_detection/models/ssd_mobilenet_v2_mnasfpn_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_v2_mnasfpn_feature_extractor_tf1_test.py
similarity index 96%
rename from research/object_detection/models/ssd_mobilenet_v2_mnasfpn_feature_extractor_test.py
rename to research/object_detection/models/ssd_mobilenet_v2_mnasfpn_feature_extractor_tf1_test.py
index dd9aae976665dc535c07d49de6eeb9292b6b1dd0..032433128de057c97a422c97e96d16bd2942f62b 100644
--- a/research/object_detection/models/ssd_mobilenet_v2_mnasfpn_feature_extractor_test.py
+++ b/research/object_detection/models/ssd_mobilenet_v2_mnasfpn_feature_extractor_tf1_test.py
@@ -14,13 +14,16 @@
# limitations under the License.
# ==============================================================================
"""Tests for ssd_mobilenet_v2_nas_fpn_feature_extractor."""
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models import ssd_feature_extractor_test
from object_detection.models import ssd_mobilenet_v2_mnasfpn_feature_extractor as mnasfpn_feature_extractor
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class SsdMobilenetV2MnasFPNFeatureExtractorTest(
ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
diff --git a/research/object_detection/models/ssd_mobilenet_v3_feature_extractor_test.py b/research/object_detection/models/ssd_mobilenet_v3_feature_extractor_tf1_test.py
similarity index 95%
rename from research/object_detection/models/ssd_mobilenet_v3_feature_extractor_test.py
rename to research/object_detection/models/ssd_mobilenet_v3_feature_extractor_tf1_test.py
index 38621744daa19ebc25e07e4a933694ae9e3d7e76..43c02490a7358820404380d20aa1d2190fce01a1 100644
--- a/research/object_detection/models/ssd_mobilenet_v3_feature_extractor_test.py
+++ b/research/object_detection/models/ssd_mobilenet_v3_feature_extractor_tf1_test.py
@@ -13,17 +13,15 @@
# limitations under the License.
# ==============================================================================
"""Tests for ssd_mobilenet_v3_feature_extractor."""
-
+import unittest
import tensorflow.compat.v1 as tf
-import tf_slim as slim
from object_detection.models import ssd_mobilenet_v3_feature_extractor
from object_detection.models import ssd_mobilenet_v3_feature_extractor_testbase
+from object_detection.utils import tf_version
-slim = slim
-
-
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class SsdMobilenetV3LargeFeatureExtractorTest(
ssd_mobilenet_v3_feature_extractor_testbase
._SsdMobilenetV3FeatureExtractorTestBase):
@@ -63,6 +61,7 @@ class SsdMobilenetV3LargeFeatureExtractorTest(
use_explicit_padding=use_explicit_padding))
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class SsdMobilenetV3SmallFeatureExtractorTest(
ssd_mobilenet_v3_feature_extractor_testbase
._SsdMobilenetV3FeatureExtractorTestBase):
diff --git a/research/object_detection/models/ssd_pnasnet_feature_extractor.py b/research/object_detection/models/ssd_pnasnet_feature_extractor.py
index 802c839484eaabf746f848930da50af44f8a1f00..48f1dee3b4f6aceffd87b995bebb06a88b25c4ca 100644
--- a/research/object_detection/models/ssd_pnasnet_feature_extractor.py
+++ b/research/object_detection/models/ssd_pnasnet_feature_extractor.py
@@ -27,7 +27,10 @@ from object_detection.models import feature_map_generators
from object_detection.utils import context_manager
from object_detection.utils import ops
from object_detection.utils import variables_helper
-from nets.nasnet import pnasnet
+try:
+ from nets.nasnet import pnasnet # pylint: disable=g-import-not-at-top
+except: # pylint: disable=bare-except
+ pass
def pnasnet_large_arg_scope_for_detection(is_batch_norm_training=False):
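The try/except guard above lets this module import cleanly on installs where slim's nets package is unavailable (for example TF2-only environments), deferring the failure to the point where the PNASNet extractor is actually used. A sketch of that pattern with a hypothetical late-failing helper; the sketch narrows the bare except and adds a sentinel for clarity:

try:
  from nets.nasnet import pnasnet  # pylint: disable=g-import-not-at-top
except ImportError:
  pnasnet = None  # slim nets not installed; fail only if PNASNet is requested.


def build_pnasnet_scope(is_batch_norm_training=False):
  """Hypothetical helper that fails at call time instead of import time."""
  if pnasnet is None:
    raise RuntimeError(
        'The PNASNet feature extractor requires the slim `nets` package; '
        'install tf-slim and the tensorflow/models slim nets to use it.')
  return pnasnet.pnasnet_large_arg_scope()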
diff --git a/research/object_detection/models/ssd_pnasnet_feature_extractor_test.py b/research/object_detection/models/ssd_pnasnet_feature_extractor_tf1_test.py
similarity index 97%
rename from research/object_detection/models/ssd_pnasnet_feature_extractor_test.py
rename to research/object_detection/models/ssd_pnasnet_feature_extractor_tf1_test.py
index 1f2fb0f836b2f050906caed9a202c0d613d57375..d5f5bff92d9f7da6fbf8243dd3dc1dff0bc9e628 100644
--- a/research/object_detection/models/ssd_pnasnet_feature_extractor_test.py
+++ b/research/object_detection/models/ssd_pnasnet_feature_extractor_tf1_test.py
@@ -14,13 +14,16 @@
# ==============================================================================
"""Tests for ssd_pnas_feature_extractor."""
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.models import ssd_feature_extractor_test
from object_detection.models import ssd_pnasnet_feature_extractor
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class SsdPnasNetFeatureExtractorTest(
ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
diff --git a/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_test.py b/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_test.py
deleted file mode 100644
index ddd4b0811a0e9c6527451dfaa149992efa86e4c0..0000000000000000000000000000000000000000
--- a/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_test.py
+++ /dev/null
@@ -1,126 +0,0 @@
-# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-"""Tests for ssd resnet v1 FPN feature extractors."""
-import tensorflow.compat.v1 as tf
-
-from object_detection.models import ssd_resnet_v1_fpn_feature_extractor
-from object_detection.models import ssd_resnet_v1_fpn_feature_extractor_testbase
-from object_detection.models import ssd_resnet_v1_fpn_keras_feature_extractor
-
-
-class SSDResnet50V1FeatureExtractorTest(
- ssd_resnet_v1_fpn_feature_extractor_testbase.
- SSDResnetFPNFeatureExtractorTestBase):
- """SSDResnet50v1Fpn feature extractor test."""
-
- def _create_feature_extractor(self, depth_multiplier, pad_to_multiple,
- use_explicit_padding=False, min_depth=32,
- use_keras=False):
- is_training = True
- if use_keras:
- return (ssd_resnet_v1_fpn_keras_feature_extractor.
- SSDResNet50V1FpnKerasFeatureExtractor(
- is_training=is_training,
- depth_multiplier=depth_multiplier,
- min_depth=min_depth,
- pad_to_multiple=pad_to_multiple,
- conv_hyperparams=self._build_conv_hyperparams(
- add_batch_norm=False),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- name='ResNet50V1_FPN'))
- else:
- return (
- ssd_resnet_v1_fpn_feature_extractor.SSDResnet50V1FpnFeatureExtractor(
- is_training, depth_multiplier, min_depth, pad_to_multiple,
- self.conv_hyperparams_fn,
- use_explicit_padding=use_explicit_padding))
-
- def _resnet_scope_name(self, use_keras=False):
- if use_keras:
- return 'ResNet50V1_FPN'
- return 'resnet_v1_50'
-
-
-class SSDResnet101V1FeatureExtractorTest(
- ssd_resnet_v1_fpn_feature_extractor_testbase.
- SSDResnetFPNFeatureExtractorTestBase):
- """SSDResnet101v1Fpn feature extractor test."""
-
- def _create_feature_extractor(self, depth_multiplier, pad_to_multiple,
- use_explicit_padding=False, min_depth=32,
- use_keras=False):
- is_training = True
- if use_keras:
- return (ssd_resnet_v1_fpn_keras_feature_extractor.
- SSDResNet101V1FpnKerasFeatureExtractor(
- is_training=is_training,
- depth_multiplier=depth_multiplier,
- min_depth=min_depth,
- pad_to_multiple=pad_to_multiple,
- conv_hyperparams=self._build_conv_hyperparams(
- add_batch_norm=False),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- name='ResNet101V1_FPN'))
- else:
- return (
- ssd_resnet_v1_fpn_feature_extractor.SSDResnet101V1FpnFeatureExtractor(
- is_training, depth_multiplier, min_depth, pad_to_multiple,
- self.conv_hyperparams_fn,
- use_explicit_padding=use_explicit_padding))
-
- def _resnet_scope_name(self, use_keras):
- if use_keras:
- return 'ResNet101V1_FPN'
- return 'resnet_v1_101'
-
-
-class SSDResnet152V1FeatureExtractorTest(
- ssd_resnet_v1_fpn_feature_extractor_testbase.
- SSDResnetFPNFeatureExtractorTestBase):
- """SSDResnet152v1Fpn feature extractor test."""
-
- def _create_feature_extractor(self, depth_multiplier, pad_to_multiple,
- use_explicit_padding=False, min_depth=32,
- use_keras=False):
- is_training = True
- if use_keras:
- return (ssd_resnet_v1_fpn_keras_feature_extractor.
- SSDResNet152V1FpnKerasFeatureExtractor(
- is_training=is_training,
- depth_multiplier=depth_multiplier,
- min_depth=min_depth,
- pad_to_multiple=pad_to_multiple,
- conv_hyperparams=self._build_conv_hyperparams(
- add_batch_norm=False),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- name='ResNet152V1_FPN'))
- else:
- return (
- ssd_resnet_v1_fpn_feature_extractor.SSDResnet152V1FpnFeatureExtractor(
- is_training, depth_multiplier, min_depth, pad_to_multiple,
- self.conv_hyperparams_fn,
- use_explicit_padding=use_explicit_padding))
-
- def _resnet_scope_name(self, use_keras):
- if use_keras:
- return 'ResNet152V1_FPN'
- return 'resnet_v1_152'
-
-
-if __name__ == '__main__':
- tf.test.main()
diff --git a/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_testbase.py b/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_testbase.py
index c3854444dadb3ef8bf76ff65bd9013d382648848..1ccad530ed5f34da2bd903c23b1d974f86a9d933 100644
--- a/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_testbase.py
+++ b/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_testbase.py
@@ -19,24 +19,20 @@ from __future__ import division
from __future__ import print_function
import abc
-from absl.testing import parameterized
import numpy as np
from six.moves import zip
import tensorflow.compat.v1 as tf
from object_detection.models import ssd_feature_extractor_test
+from object_detection.utils import test_utils
-@parameterized.parameters(
- {'use_keras': False},
- {'use_keras': True},
-)
class SSDResnetFPNFeatureExtractorTestBase(
ssd_feature_extractor_test.SsdFeatureExtractorTestBase):
"""Helper test class for SSD Resnet v1 FPN feature extractors."""
@abc.abstractmethod
- def _resnet_scope_name(self, use_keras):
+ def _resnet_scope_name(self):
pass
@abc.abstractmethod
@@ -52,7 +48,7 @@ class SSDResnetFPNFeatureExtractorTestBase(
use_keras=False):
pass
- def test_extract_features_returns_correct_shapes_256(self, use_keras):
+ def test_extract_features_returns_correct_shapes_256(self):
image_height = 256
image_width = 256
depth_multiplier = 1.0
@@ -62,10 +58,10 @@ class SSDResnetFPNFeatureExtractorTestBase(
(2, 2, 2, 256)]
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
- expected_feature_map_shape)
+ expected_feature_map_shape, use_keras=self.is_tf2())
def test_extract_features_returns_correct_shapes_with_dynamic_inputs(
- self, use_keras):
+ self):
image_height = 256
image_width = 256
depth_multiplier = 1.0
@@ -75,10 +71,10 @@ class SSDResnetFPNFeatureExtractorTestBase(
(2, 2, 2, 256)]
self.check_extract_features_returns_correct_shapes_with_dynamic_inputs(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
- expected_feature_map_shape, use_keras=use_keras)
+ expected_feature_map_shape, use_keras=self.is_tf2())
def test_extract_features_returns_correct_shapes_with_depth_multiplier(
- self, use_keras):
+ self):
image_height = 256
image_width = 256
depth_multiplier = 0.5
@@ -91,10 +87,10 @@ class SSDResnetFPNFeatureExtractorTestBase(
(2, 2, 2, expected_num_channels)]
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
- expected_feature_map_shape, use_keras=use_keras)
+ expected_feature_map_shape, use_keras=self.is_tf2())
def test_extract_features_returns_correct_shapes_with_min_depth(
- self, use_keras):
+ self):
image_height = 256
image_width = 256
depth_multiplier = 1.0
@@ -106,23 +102,24 @@ class SSDResnetFPNFeatureExtractorTestBase(
(2, 4, 4, min_depth),
(2, 2, 2, min_depth)]
- def graph_fn(image_tensor):
+ with test_utils.GraphContextOrNone() as g:
+ image_tensor = tf.random.uniform([2, image_height, image_width, 3])
feature_extractor = self._create_feature_extractor(
depth_multiplier, pad_to_multiple, min_depth=min_depth,
- use_keras=use_keras)
- if use_keras:
+ use_keras=self.is_tf2())
+
+ def graph_fn():
+ if self.is_tf2():
return feature_extractor(image_tensor)
return feature_extractor.extract_features(image_tensor)
- image_tensor = np.random.rand(2, image_height, image_width,
- 3).astype(np.float32)
- feature_maps = self.execute(graph_fn, [image_tensor])
+ feature_maps = self.execute(graph_fn, [], graph=g)
for feature_map, expected_shape in zip(feature_maps,
expected_feature_map_shape):
self.assertAllEqual(feature_map.shape, expected_shape)
def test_extract_features_returns_correct_shapes_with_pad_to_multiple(
- self, use_keras):
+ self):
image_height = 254
image_width = 254
depth_multiplier = 1.0
@@ -133,55 +130,62 @@ class SSDResnetFPNFeatureExtractorTestBase(
self.check_extract_features_returns_correct_shape(
2, image_height, image_width, depth_multiplier, pad_to_multiple,
- expected_feature_map_shape, use_keras=use_keras)
+ expected_feature_map_shape, use_keras=self.is_tf2())
def test_extract_features_raises_error_with_invalid_image_size(
- self, use_keras):
+ self):
image_height = 32
image_width = 32
depth_multiplier = 1.0
pad_to_multiple = 1
self.check_extract_features_raises_error_with_invalid_image_size(
image_height, image_width, depth_multiplier, pad_to_multiple,
- use_keras=use_keras)
+ use_keras=self.is_tf2())
- def test_preprocess_returns_correct_value_range(self, use_keras):
+ def test_preprocess_returns_correct_value_range(self):
image_height = 128
image_width = 128
depth_multiplier = 1
pad_to_multiple = 1
- test_image = tf.constant(np.random.rand(4, image_height, image_width, 3))
- feature_extractor = self._create_feature_extractor(depth_multiplier,
- pad_to_multiple,
- use_keras=use_keras)
- preprocessed_image = feature_extractor.preprocess(test_image)
- with self.test_session() as sess:
- test_image_out, preprocessed_image_out = sess.run(
- [test_image, preprocessed_image])
- self.assertAllClose(preprocessed_image_out,
- test_image_out - [[123.68, 116.779, 103.939]])
-
- def test_variables_only_created_in_scope(self, use_keras):
+ test_image_np = np.random.rand(4, image_height, image_width, 3)
+ with test_utils.GraphContextOrNone() as g:
+ test_image = tf.constant(test_image_np)
+ feature_extractor = self._create_feature_extractor(
+ depth_multiplier, pad_to_multiple, use_keras=self.is_tf2())
+
+ def graph_fn():
+ preprocessed_image = feature_extractor.preprocess(test_image)
+ return preprocessed_image
+
+ preprocessed_image_out = self.execute(graph_fn, [], graph=g)
+ self.assertAllClose(preprocessed_image_out,
+ test_image_np - [[123.68, 116.779, 103.939]])
+
+ def test_variables_only_created_in_scope(self):
+ if self.is_tf2():
+ self.skipTest('test_variables_only_created_in_scope is only tf1')
depth_multiplier = 1
pad_to_multiple = 1
- scope_name = self._resnet_scope_name(use_keras)
+ scope_name = self._resnet_scope_name()
self.check_feature_extractor_variables_under_scope(
depth_multiplier,
pad_to_multiple,
scope_name,
- use_keras=use_keras)
+ use_keras=self.is_tf2())
- def test_variable_count(self, use_keras):
+ def test_variable_count(self):
+ if self.is_tf2():
+ self.skipTest('test_variable_count is only tf1')
depth_multiplier = 1
pad_to_multiple = 1
variables = self.get_feature_extractor_variables(
depth_multiplier,
pad_to_multiple,
- use_keras=use_keras)
+ use_keras=self.is_tf2())
# The number of expected variables in resnet_v1_50, resnet_v1_101,
# and resnet_v1_152 is 279, 534, and 789 respectively.
expected_variables_len = 279
- scope_name = self._resnet_scope_name(use_keras)
+ scope_name = self._resnet_scope_name()
if scope_name in ('ResNet101V1_FPN', 'resnet_v1_101'):
expected_variables_len = 534
elif scope_name in ('ResNet152V1_FPN', 'resnet_v1_152'):
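The testbase above now wraps graph construction in test_utils.GraphContextOrNone so the same test body works in TF1 graph mode and TF2 eager mode. A hedged sketch of what such a context manager can look like; the real object_detection.utils.test_utils implementation may differ:

import contextlib

import tensorflow as tf


@contextlib.contextmanager
def graph_context_or_none():
  """Yields a fresh tf.Graph under TF1 graph mode, or None under TF2 eager."""
  if tf.executing_eagerly():
    yield None  # TF2: nothing to enter; the caller runs eagerly.
  else:
    graph = tf.Graph()
    with graph.as_default():
      yield graph  # TF1: ops created by the caller land in this graph.


with graph_context_or_none() as g:
  image = tf.random.uniform([2, 8, 8, 3])
  print('graph mode' if g is not None else 'eager mode')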
diff --git a/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_tf1_test.py b/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_tf1_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..58952ff9486d6be3f077c9e21788ce8409806d18
--- /dev/null
+++ b/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_tf1_test.py
@@ -0,0 +1,85 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Tests for ssd resnet v1 FPN feature extractors."""
+import unittest
+import tensorflow.compat.v1 as tf
+
+from object_detection.models import ssd_resnet_v1_fpn_feature_extractor
+from object_detection.models import ssd_resnet_v1_fpn_feature_extractor_testbase
+from object_detection.utils import tf_version
+
+
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
+class SSDResnet50V1FeatureExtractorTest(
+ ssd_resnet_v1_fpn_feature_extractor_testbase.
+ SSDResnetFPNFeatureExtractorTestBase):
+ """SSDResnet50v1Fpn feature extractor test."""
+
+ def _create_feature_extractor(self, depth_multiplier, pad_to_multiple,
+ use_explicit_padding=False, min_depth=32,
+ use_keras=False):
+ is_training = True
+ return (
+ ssd_resnet_v1_fpn_feature_extractor.SSDResnet50V1FpnFeatureExtractor(
+ is_training, depth_multiplier, min_depth, pad_to_multiple,
+ self.conv_hyperparams_fn,
+ use_explicit_padding=use_explicit_padding))
+
+ def _resnet_scope_name(self):
+ return 'resnet_v1_50'
+
+
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
+class SSDResnet101V1FeatureExtractorTest(
+ ssd_resnet_v1_fpn_feature_extractor_testbase.
+ SSDResnetFPNFeatureExtractorTestBase):
+ """SSDResnet101v1Fpn feature extractor test."""
+
+ def _create_feature_extractor(self, depth_multiplier, pad_to_multiple,
+ use_explicit_padding=False, min_depth=32,
+ use_keras=False):
+ is_training = True
+ return (
+ ssd_resnet_v1_fpn_feature_extractor.SSDResnet101V1FpnFeatureExtractor(
+ is_training, depth_multiplier, min_depth, pad_to_multiple,
+ self.conv_hyperparams_fn,
+ use_explicit_padding=use_explicit_padding))
+
+ def _resnet_scope_name(self):
+ return 'resnet_v1_101'
+
+
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
+class SSDResnet152V1FeatureExtractorTest(
+ ssd_resnet_v1_fpn_feature_extractor_testbase.
+ SSDResnetFPNFeatureExtractorTestBase):
+ """SSDResnet152v1Fpn feature extractor test."""
+
+ def _create_feature_extractor(self, depth_multiplier, pad_to_multiple,
+ use_explicit_padding=False, min_depth=32,
+ use_keras=False):
+ is_training = True
+ return (
+ ssd_resnet_v1_fpn_feature_extractor.SSDResnet152V1FpnFeatureExtractor(
+ is_training, depth_multiplier, min_depth, pad_to_multiple,
+ self.conv_hyperparams_fn,
+ use_explicit_padding=use_explicit_padding))
+
+ def _resnet_scope_name(self):
+ return 'resnet_v1_152'
+
+
+if __name__ == '__main__':
+ tf.test.main()
diff --git a/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_tf2_test.py b/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_tf2_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..27c54ddd08ffa866dad4975c9bed7c629e8c46ac
--- /dev/null
+++ b/research/object_detection/models/ssd_resnet_v1_fpn_feature_extractor_tf2_test.py
@@ -0,0 +1,103 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Tests for ssd resnet v1 FPN feature extractors."""
+import unittest
+import tensorflow.compat.v1 as tf
+
+from object_detection.models import ssd_resnet_v1_fpn_feature_extractor_testbase
+from object_detection.models import ssd_resnet_v1_fpn_keras_feature_extractor
+from object_detection.utils import tf_version
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class SSDResnet50V1FeatureExtractorTest(
+ ssd_resnet_v1_fpn_feature_extractor_testbase.
+ SSDResnetFPNFeatureExtractorTestBase):
+ """SSDResnet50v1Fpn feature extractor test."""
+
+ def _create_feature_extractor(self, depth_multiplier, pad_to_multiple,
+ use_explicit_padding=False, min_depth=32,
+ use_keras=True):
+ is_training = True
+ return (ssd_resnet_v1_fpn_keras_feature_extractor.
+ SSDResNet50V1FpnKerasFeatureExtractor(
+ is_training=is_training,
+ depth_multiplier=depth_multiplier,
+ min_depth=min_depth,
+ pad_to_multiple=pad_to_multiple,
+ conv_hyperparams=self._build_conv_hyperparams(
+ add_batch_norm=False),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ name='ResNet50V1_FPN'))
+
+ def _resnet_scope_name(self):
+ return 'ResNet50V1_FPN'
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class SSDResnet101V1FeatureExtractorTest(
+ ssd_resnet_v1_fpn_feature_extractor_testbase.
+ SSDResnetFPNFeatureExtractorTestBase):
+ """SSDResnet101v1Fpn feature extractor test."""
+
+ def _create_feature_extractor(self, depth_multiplier, pad_to_multiple,
+ use_explicit_padding=False, min_depth=32,
+ use_keras=False):
+ is_training = True
+ return (ssd_resnet_v1_fpn_keras_feature_extractor.
+ SSDResNet101V1FpnKerasFeatureExtractor(
+ is_training=is_training,
+ depth_multiplier=depth_multiplier,
+ min_depth=min_depth,
+ pad_to_multiple=pad_to_multiple,
+ conv_hyperparams=self._build_conv_hyperparams(
+ add_batch_norm=False),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ name='ResNet101V1_FPN'))
+
+ def _resnet_scope_name(self):
+ return 'ResNet101V1_FPN'
+
+
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
+class SSDResnet152V1FeatureExtractorTest(
+ ssd_resnet_v1_fpn_feature_extractor_testbase.
+ SSDResnetFPNFeatureExtractorTestBase):
+ """SSDResnet152v1Fpn feature extractor test."""
+
+ def _create_feature_extractor(self, depth_multiplier, pad_to_multiple,
+ use_explicit_padding=False, min_depth=32,
+ use_keras=False):
+ is_training = True
+ return (ssd_resnet_v1_fpn_keras_feature_extractor.
+ SSDResNet152V1FpnKerasFeatureExtractor(
+ is_training=is_training,
+ depth_multiplier=depth_multiplier,
+ min_depth=min_depth,
+ pad_to_multiple=pad_to_multiple,
+ conv_hyperparams=self._build_conv_hyperparams(
+ add_batch_norm=False),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ name='ResNet152V1_FPN'))
+
+ def _resnet_scope_name(self):
+ return 'ResNet152V1_FPN'
+
+
+if __name__ == '__main__':
+ tf.test.main()
diff --git a/research/object_detection/models/ssd_resnet_v1_fpn_keras_feature_extractor.py b/research/object_detection/models/ssd_resnet_v1_fpn_keras_feature_extractor.py
index 6de9ae3e5b8f24885d139b2b277b09ccd1782169..0ac929cc6349a21b541f20adb624ad157d4f4a63 100644
--- a/research/object_detection/models/ssd_resnet_v1_fpn_keras_feature_extractor.py
+++ b/research/object_detection/models/ssd_resnet_v1_fpn_keras_feature_extractor.py
@@ -246,17 +246,6 @@ class SSDResNetV1FpnKerasFeatureExtractor(
feature_maps.append(last_feature_map)
return feature_maps
- def restore_from_classification_checkpoint_fn(self, feature_extractor_scope):
- """Returns a map for restoring from an (object-based) checkpoint.
-
- Args:
- feature_extractor_scope: A scope name for the feature extractor (unused).
-
- Returns:
- A dict mapping keys to Keras models
- """
- return {'feature_extractor': self.classification_backbone}
-
class SSDResNet50V1FpnKerasFeatureExtractor(
SSDResNetV1FpnKerasFeatureExtractor):
diff --git a/research/object_detection/models/ssd_resnet_v1_ppn_feature_extractor_test.py b/research/object_detection/models/ssd_resnet_v1_ppn_feature_extractor_tf1_test.py
similarity index 92%
rename from research/object_detection/models/ssd_resnet_v1_ppn_feature_extractor_test.py
rename to research/object_detection/models/ssd_resnet_v1_ppn_feature_extractor_tf1_test.py
index bfcb74cf9619764f6ecbd9399f8607f4e6439e76..bb95cb53f3905ef9288ade7600005c1ba9372be5 100644
--- a/research/object_detection/models/ssd_resnet_v1_ppn_feature_extractor_test.py
+++ b/research/object_detection/models/ssd_resnet_v1_ppn_feature_extractor_tf1_test.py
@@ -13,12 +13,15 @@
# limitations under the License.
# ==============================================================================
"""Tests for ssd resnet v1 feature extractors."""
+import unittest
import tensorflow.compat.v1 as tf
from object_detection.models import ssd_resnet_v1_ppn_feature_extractor
from object_detection.models import ssd_resnet_v1_ppn_feature_extractor_testbase
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class SSDResnet50V1PpnFeatureExtractorTest(
ssd_resnet_v1_ppn_feature_extractor_testbase.
SSDResnetPpnFeatureExtractorTestBase):
@@ -40,6 +43,7 @@ class SSDResnet50V1PpnFeatureExtractorTest(
return 'resnet_v1_50'
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class SSDResnet101V1PpnFeatureExtractorTest(
ssd_resnet_v1_ppn_feature_extractor_testbase.
SSDResnetPpnFeatureExtractorTestBase):
@@ -62,6 +66,7 @@ class SSDResnet101V1PpnFeatureExtractorTest(
return 'resnet_v1_101'
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class SSDResnet152V1PpnFeatureExtractorTest(
ssd_resnet_v1_ppn_feature_extractor_testbase.
SSDResnetPpnFeatureExtractorTestBase):
diff --git a/research/object_detection/predictors/convolutional_box_predictor_test.py b/research/object_detection/predictors/convolutional_box_predictor_tf1_test.py
similarity index 99%
rename from research/object_detection/predictors/convolutional_box_predictor_test.py
rename to research/object_detection/predictors/convolutional_box_predictor_tf1_test.py
index eb608e1e74a87fdaabf16bbae745819c05bdf155..3236615dfb60bc848ec271fc5173b9c4169feb93 100644
--- a/research/object_detection/predictors/convolutional_box_predictor_test.py
+++ b/research/object_detection/predictors/convolutional_box_predictor_tf1_test.py
@@ -19,7 +19,7 @@
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
-
+import unittest
from absl.testing import parameterized
import numpy as np
from six.moves import range
@@ -35,8 +35,10 @@ from object_detection.predictors.heads import class_head
from object_detection.predictors.heads import mask_head
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class ConvolutionalBoxPredictorTest(test_case.TestCase):
def _build_arg_scope_with_conv_hyperparams(self):
@@ -281,6 +283,7 @@ class ConvolutionalBoxPredictorTest(test_case.TestCase):
self.assertEqual(bad_dangling_ops, [])
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class WeightSharedConvolutionalBoxPredictorTest(test_case.TestCase):
def _build_arg_scope_with_conv_hyperparams(self):
diff --git a/research/object_detection/predictors/convolutional_keras_box_predictor.py b/research/object_detection/predictors/convolutional_keras_box_predictor.py
index 630c680398baa4a60c945a0bd2d874ea0f8c1783..fc72fb04c2d47301b1ac5fc185ca98c6b00073c0 100644
--- a/research/object_detection/predictors/convolutional_keras_box_predictor.py
+++ b/research/object_detection/predictors/convolutional_keras_box_predictor.py
@@ -314,7 +314,8 @@ class WeightSharedConvolutionalBoxPredictor(box_predictor.KerasBoxPredictor):
self, inserted_layer_counter, target_channel):
projection_layers = []
if inserted_layer_counter >= 0:
- use_bias = False if self._apply_batch_norm else True
+ use_bias = False if (self._apply_batch_norm and not
+ self._conv_hyperparams.force_use_bias()) else True
projection_layers.append(keras.Conv2D(
target_channel, [1, 1], strides=1, padding='SAME',
name='ProjectionLayer/conv2d_{}'.format(inserted_layer_counter),
@@ -331,7 +332,8 @@ class WeightSharedConvolutionalBoxPredictor(box_predictor.KerasBoxPredictor):
conv_layers = []
batch_norm_layers = []
activation_layers = []
- use_bias = False if self._apply_batch_norm else True
+ use_bias = False if (self._apply_batch_norm and not
+ self._conv_hyperparams.force_use_bias()) else True
for additional_conv_layer_idx in range(self._num_layers_before_predictor):
layer_name = '{}/conv2d_{}'.format(
tower_name_scope, additional_conv_layer_idx)
@@ -363,7 +365,9 @@ class WeightSharedConvolutionalBoxPredictor(box_predictor.KerasBoxPredictor):
training=(self._is_training and not self._freeze_batchnorm),
name='{}/conv2d_{}/BatchNorm/feature_{}'.format(
tower_name_scope, additional_conv_layer_idx, feature_index)))
- activation_layers.append(tf.keras.layers.Lambda(tf.nn.relu6))
+ activation_layers.append(self._conv_hyperparams.build_activation_layer(
+ name='{}/conv2d_{}/activation_{}'.format(
+ tower_name_scope, additional_conv_layer_idx, feature_index)))
# Set conv layers as the shared conv layers for different feature maps with
# the same tower_name_scope.
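The use_bias change above keeps the convolution bias whenever batch norm is off or the hyperparams force a bias even under batch norm. A small standalone sketch of that decision (force_use_bias is passed as a plain bool here, standing in for the conv_hyperparams accessor):

def conv_should_use_bias(apply_batch_norm, force_use_bias):
  """Same as: False if (apply_batch_norm and not force_use_bias) else True."""
  return not apply_batch_norm or force_use_bias


# Bias is dropped only when batch norm is applied and not explicitly forced.
for apply_bn in (False, True):
  for force in (False, True):
    print(apply_bn, force, '->', conv_should_use_bias(apply_bn, force))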
diff --git a/research/object_detection/predictors/convolutional_keras_box_predictor_test.py b/research/object_detection/predictors/convolutional_keras_box_predictor_tf2_test.py
similarity index 64%
rename from research/object_detection/predictors/convolutional_keras_box_predictor_test.py
rename to research/object_detection/predictors/convolutional_keras_box_predictor_tf2_test.py
index 5db7e962f88624e1a5663e7e2a881c0afbe290f2..180a6e94643a80ac04ee12dfacb5bc6d04e09ec8 100644
--- a/research/object_detection/predictors/convolutional_keras_box_predictor_test.py
+++ b/research/object_detection/predictors/convolutional_keras_box_predictor_tf2_test.py
@@ -14,6 +14,7 @@
# ==============================================================================
"""Tests for object_detection.predictors.convolutional_keras_box_predictor."""
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
@@ -26,8 +27,10 @@ from object_detection.predictors.heads import keras_class_head
from object_detection.predictors.heads import keras_mask_head
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class ConvolutionalKerasBoxPredictorTest(test_case.TestCase):
def _build_conv_hyperparams(self):
@@ -47,23 +50,23 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase):
return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams)
def test_get_boxes_for_five_aspect_ratios_per_location(self):
+ conv_box_predictor = (
+ box_predictor_builder.build_convolutional_keras_box_predictor(
+ is_training=False,
+ num_classes=0,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ num_predictions_per_location_list=[5],
+ min_depth=0,
+ max_depth=32,
+ num_layers_before_predictor=1,
+ use_dropout=True,
+ dropout_keep_prob=0.8,
+ kernel_size=1,
+ box_code_size=4
+ ))
def graph_fn(image_features):
- conv_box_predictor = (
- box_predictor_builder.build_convolutional_keras_box_predictor(
- is_training=False,
- num_classes=0,
- conv_hyperparams=self._build_conv_hyperparams(),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- num_predictions_per_location_list=[5],
- min_depth=0,
- max_depth=32,
- num_layers_before_predictor=1,
- use_dropout=True,
- dropout_keep_prob=0.8,
- kernel_size=1,
- box_code_size=4
- ))
box_predictions = conv_box_predictor([image_features])
box_encodings = tf.concat(
box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
@@ -78,23 +81,23 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase):
self.assertAllEqual(objectness_predictions.shape, [4, 320, 1])
def test_get_boxes_for_one_aspect_ratio_per_location(self):
+ conv_box_predictor = (
+ box_predictor_builder.build_convolutional_keras_box_predictor(
+ is_training=False,
+ num_classes=0,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ num_predictions_per_location_list=[1],
+ min_depth=0,
+ max_depth=32,
+ num_layers_before_predictor=1,
+ use_dropout=True,
+ dropout_keep_prob=0.8,
+ kernel_size=1,
+ box_code_size=4
+ ))
def graph_fn(image_features):
- conv_box_predictor = (
- box_predictor_builder.build_convolutional_keras_box_predictor(
- is_training=False,
- num_classes=0,
- conv_hyperparams=self._build_conv_hyperparams(),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- num_predictions_per_location_list=[1],
- min_depth=0,
- max_depth=32,
- num_layers_before_predictor=1,
- use_dropout=True,
- dropout_keep_prob=0.8,
- kernel_size=1,
- box_code_size=4
- ))
box_predictions = conv_box_predictor([image_features])
box_encodings = tf.concat(
box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
@@ -111,23 +114,23 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase):
self):
num_classes_without_background = 6
image_features = np.random.rand(4, 8, 8, 64).astype(np.float32)
+ conv_box_predictor = (
+ box_predictor_builder.build_convolutional_keras_box_predictor(
+ is_training=False,
+ num_classes=num_classes_without_background,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ num_predictions_per_location_list=[5],
+ min_depth=0,
+ max_depth=32,
+ num_layers_before_predictor=1,
+ use_dropout=True,
+ dropout_keep_prob=0.8,
+ kernel_size=1,
+ box_code_size=4
+ ))
def graph_fn(image_features):
- conv_box_predictor = (
- box_predictor_builder.build_convolutional_keras_box_predictor(
- is_training=False,
- num_classes=num_classes_without_background,
- conv_hyperparams=self._build_conv_hyperparams(),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- num_predictions_per_location_list=[5],
- min_depth=0,
- max_depth=32,
- num_layers_before_predictor=1,
- use_dropout=True,
- dropout_keep_prob=0.8,
- kernel_size=1,
- box_code_size=4
- ))
box_predictions = conv_box_predictor([image_features])
box_encodings = tf.concat(
box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
@@ -144,7 +147,7 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase):
def test_get_predictions_with_feature_maps_of_dynamic_shape(
self):
- image_features = tf.placeholder(dtype=tf.float32, shape=[4, None, None, 64])
+ tf.keras.backend.clear_session()
conv_box_predictor = (
box_predictor_builder.build_convolutional_keras_box_predictor(
is_training=False,
@@ -161,28 +164,25 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase):
kernel_size=1,
box_code_size=4
))
- box_predictions = conv_box_predictor([image_features])
- box_encodings = tf.concat(
- box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
- objectness_predictions = tf.concat(
- box_predictions[box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND],
- axis=1)
- init_op = tf.global_variables_initializer()
-
+ variables = []
+ def graph_fn(image_features):
+ box_predictions = conv_box_predictor([image_features])
+ variables.extend(list(conv_box_predictor.variables))
+ box_encodings = tf.concat(
+ box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
+ objectness_predictions = tf.concat(
+ box_predictions[box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND],
+ axis=1)
+ return box_encodings, objectness_predictions
resolution = 32
expected_num_anchors = resolution*resolution*5
- with self.test_session() as sess:
- sess.run(init_op)
- (box_encodings_shape,
- objectness_predictions_shape) = sess.run(
- [tf.shape(box_encodings), tf.shape(objectness_predictions)],
- feed_dict={image_features:
- np.random.rand(4, resolution, resolution, 64)})
- actual_variable_set = set(
- [var.op.name for var in tf.trainable_variables()])
- self.assertAllEqual(box_encodings_shape, [4, expected_num_anchors, 1, 4])
- self.assertAllEqual(objectness_predictions_shape,
- [4, expected_num_anchors, 1])
+ box_encodings, objectness_predictions = self.execute(
+ graph_fn, [np.random.rand(4, resolution, resolution, 64)])
+
+ actual_variable_set = set([var.name.split(':')[0] for var in variables])
+ self.assertAllEqual(box_encodings.shape, [4, expected_num_anchors, 1, 4])
+ self.assertAllEqual(objectness_predictions.shape,
+ [4, expected_num_anchors, 1])
expected_variable_set = set([
'BoxPredictor/SharedConvolutions_0/Conv2d_0_1x1_32/bias',
'BoxPredictor/SharedConvolutions_0/Conv2d_0_1x1_32/kernel',
@@ -195,7 +195,7 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase):
['box_encodings', 'class_predictions_with_background'])
def test_use_depthwise_convolution(self):
- image_features = tf.placeholder(dtype=tf.float32, shape=[4, None, None, 64])
+ tf.keras.backend.clear_session()
conv_box_predictor = (
box_predictor_builder.build_convolutional_keras_box_predictor(
is_training=False,
@@ -213,27 +213,25 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase):
box_code_size=4,
use_depthwise=True
))
- box_predictions = conv_box_predictor([image_features])
- box_encodings = tf.concat(
- box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
- objectness_predictions = tf.concat(
- box_predictions[box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND],
- axis=1)
- init_op = tf.global_variables_initializer()
+ variables = []
+ def graph_fn(image_features):
+ box_predictions = conv_box_predictor([image_features])
+ variables.extend(list(conv_box_predictor.variables))
+ box_encodings = tf.concat(
+ box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
+ objectness_predictions = tf.concat(
+ box_predictions[box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND],
+ axis=1)
+ return box_encodings, objectness_predictions
resolution = 32
expected_num_anchors = resolution*resolution*5
- with self.test_session() as sess:
- sess.run(init_op)
- (box_encodings_shape,
- objectness_predictions_shape) = sess.run(
- [tf.shape(box_encodings), tf.shape(objectness_predictions)],
- feed_dict={image_features:
- np.random.rand(4, resolution, resolution, 64)})
- actual_variable_set = set(
- [var.op.name for var in tf.trainable_variables()])
- self.assertAllEqual(box_encodings_shape, [4, expected_num_anchors, 1, 4])
- self.assertAllEqual(objectness_predictions_shape,
+ box_encodings, objectness_predictions = self.execute(
+ graph_fn, [np.random.rand(4, resolution, resolution, 64)])
+
+ actual_variable_set = set([var.name.split(':')[0] for var in variables])
+ self.assertAllEqual(box_encodings.shape, [4, expected_num_anchors, 1, 4])
+ self.assertAllEqual(objectness_predictions.shape,
[4, expected_num_anchors, 1])
expected_variable_set = set([
'BoxPredictor/SharedConvolutions_0/Conv2d_0_1x1_32/bias',
@@ -259,6 +257,7 @@ class ConvolutionalKerasBoxPredictorTest(test_case.TestCase):
['box_encodings', 'class_predictions_with_background'])
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
def _build_conv_hyperparams(self, add_batch_norm=True):
@@ -288,19 +287,20 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
# pylint: disable=line-too-long
def test_get_boxes_for_five_aspect_ratios_per_location(self):
+ conv_box_predictor = (
+ box_predictor_builder
+ .build_weight_shared_convolutional_keras_box_predictor(
+ is_training=False,
+ num_classes=0,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ num_predictions_per_location_list=[5],
+ depth=32,
+ num_layers_before_predictor=1,
+ box_code_size=4))
def graph_fn(image_features):
- conv_box_predictor = (
- box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor(
- is_training=False,
- num_classes=0,
- conv_hyperparams=self._build_conv_hyperparams(),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- num_predictions_per_location_list=[5],
- depth=32,
- num_layers_before_predictor=1,
- box_code_size=4))
box_predictions = conv_box_predictor([image_features])
box_encodings = tf.concat(
box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
@@ -314,20 +314,21 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
self.assertAllEqual(objectness_predictions.shape, [4, 320, 1])
def test_bias_predictions_to_background_with_sigmoid_score_conversion(self):
+ conv_box_predictor = (
+ box_predictor_builder
+ .build_weight_shared_convolutional_keras_box_predictor(
+ is_training=True,
+ num_classes=2,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ num_predictions_per_location_list=[5],
+ depth=32,
+ num_layers_before_predictor=1,
+ class_prediction_bias_init=-4.6,
+ box_code_size=4))
def graph_fn(image_features):
- conv_box_predictor = (
- box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor(
- is_training=True,
- num_classes=2,
- conv_hyperparams=self._build_conv_hyperparams(),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- num_predictions_per_location_list=[5],
- depth=32,
- num_layers_before_predictor=1,
- class_prediction_bias_init=-4.6,
- box_code_size=4))
box_predictions = conv_box_predictor([image_features])
class_predictions = tf.concat(box_predictions[
box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND], axis=1)
@@ -339,20 +340,21 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
def test_get_multi_class_predictions_for_five_aspect_ratios_per_location(
self):
-
num_classes_without_background = 6
+ conv_box_predictor = (
+ box_predictor_builder
+ .build_weight_shared_convolutional_keras_box_predictor(
+ is_training=False,
+ num_classes=num_classes_without_background,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ num_predictions_per_location_list=[5],
+ depth=32,
+ num_layers_before_predictor=1,
+ box_code_size=4))
+
def graph_fn(image_features):
- conv_box_predictor = (
- box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor(
- is_training=False,
- num_classes=num_classes_without_background,
- conv_hyperparams=self._build_conv_hyperparams(),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- num_predictions_per_location_list=[5],
- depth=32,
- num_layers_before_predictor=1,
- box_code_size=4))
box_predictions = conv_box_predictor([image_features])
box_encodings = tf.concat(
box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
@@ -369,20 +371,21 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
def test_get_multi_class_predictions_from_two_feature_maps(
self):
-
num_classes_without_background = 6
+ conv_box_predictor = (
+ box_predictor_builder
+ .build_weight_shared_convolutional_keras_box_predictor(
+ is_training=False,
+ num_classes=num_classes_without_background,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ num_predictions_per_location_list=[5, 5],
+ depth=32,
+ num_layers_before_predictor=1,
+ box_code_size=4))
+
def graph_fn(image_features1, image_features2):
- conv_box_predictor = (
- box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor(
- is_training=False,
- num_classes=num_classes_without_background,
- conv_hyperparams=self._build_conv_hyperparams(),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- num_predictions_per_location_list=[5, 5],
- depth=32,
- num_layers_before_predictor=1,
- box_code_size=4))
box_predictions = conv_box_predictor([image_features1, image_features2])
box_encodings = tf.concat(
box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
@@ -401,20 +404,21 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
def test_get_multi_class_predictions_from_feature_maps_of_different_depth(
self):
-
num_classes_without_background = 6
+ conv_box_predictor = (
+ box_predictor_builder
+ .build_weight_shared_convolutional_keras_box_predictor(
+ is_training=False,
+ num_classes=num_classes_without_background,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ num_predictions_per_location_list=[5, 5, 5],
+ depth=32,
+ num_layers_before_predictor=1,
+ box_code_size=4))
+
def graph_fn(image_features1, image_features2, image_features3):
- conv_box_predictor = (
- box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor(
- is_training=False,
- num_classes=num_classes_without_background,
- conv_hyperparams=self._build_conv_hyperparams(),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- num_predictions_per_location_list=[5, 5, 5],
- depth=32,
- num_layers_before_predictor=1,
- box_code_size=4))
box_predictions = conv_box_predictor(
[image_features1, image_features2, image_features3])
box_encodings = tf.concat(
@@ -435,20 +439,25 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
def test_predictions_multiple_feature_maps_share_weights_separate_batchnorm(
self):
+ tf.keras.backend.clear_session()
num_classes_without_background = 6
+ conv_box_predictor = (
+ box_predictor_builder
+ .build_weight_shared_convolutional_keras_box_predictor(
+ is_training=False,
+ num_classes=num_classes_without_background,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ num_predictions_per_location_list=[5, 5],
+ depth=32,
+ num_layers_before_predictor=2,
+ box_code_size=4))
+ variables = []
+
def graph_fn(image_features1, image_features2):
- conv_box_predictor = (
- box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor(
- is_training=False,
- num_classes=num_classes_without_background,
- conv_hyperparams=self._build_conv_hyperparams(),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- num_predictions_per_location_list=[5, 5],
- depth=32,
- num_layers_before_predictor=2,
- box_code_size=4))
box_predictions = conv_box_predictor([image_features1, image_features2])
+ variables.extend(list(conv_box_predictor.variables))
box_encodings = tf.concat(
box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
class_predictions_with_background = tf.concat(
@@ -456,25 +465,41 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
axis=1)
return (box_encodings, class_predictions_with_background)
- with self.test_session(graph=tf.Graph()):
- graph_fn(tf.random_uniform([4, 32, 32, 3], dtype=tf.float32),
- tf.random_uniform([4, 16, 16, 3], dtype=tf.float32))
- actual_variable_set = set(
- [var.op.name for var in tf.trainable_variables()])
+ self.execute(graph_fn, [
+ np.random.rand(4, 32, 32, 3).astype(np.float32),
+ np.random.rand(4, 16, 16, 3).astype(np.float32)
+ ])
+ actual_variable_set = set([var.name.split(':')[0] for var in variables])
expected_variable_set = set([
# Box prediction tower
('WeightSharedConvolutionalBoxPredictor/'
'BoxPredictionTower/conv2d_0/kernel'),
('WeightSharedConvolutionalBoxPredictor/'
'BoxPredictionTower/conv2d_0/BatchNorm/feature_0/beta'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'BoxPredictionTower/conv2d_0/BatchNorm/feature_0/moving_mean'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'BoxPredictionTower/conv2d_0/BatchNorm/feature_0/moving_variance'),
('WeightSharedConvolutionalBoxPredictor/'
'BoxPredictionTower/conv2d_0/BatchNorm/feature_1/beta'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'BoxPredictionTower/conv2d_0/BatchNorm/feature_1/moving_mean'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'BoxPredictionTower/conv2d_0/BatchNorm/feature_1/moving_variance'),
('WeightSharedConvolutionalBoxPredictor/'
'BoxPredictionTower/conv2d_1/kernel'),
('WeightSharedConvolutionalBoxPredictor/'
'BoxPredictionTower/conv2d_1/BatchNorm/feature_0/beta'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'BoxPredictionTower/conv2d_1/BatchNorm/feature_0/moving_mean'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'BoxPredictionTower/conv2d_1/BatchNorm/feature_0/moving_variance'),
('WeightSharedConvolutionalBoxPredictor/'
'BoxPredictionTower/conv2d_1/BatchNorm/feature_1/beta'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'BoxPredictionTower/conv2d_1/BatchNorm/feature_1/moving_mean'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'BoxPredictionTower/conv2d_1/BatchNorm/feature_1/moving_variance'),
# Box prediction head
('WeightSharedConvolutionalBoxPredictor/'
'WeightSharedConvolutionalBoxHead/BoxPredictor/kernel'),
@@ -485,14 +510,30 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
'ClassPredictionTower/conv2d_0/kernel'),
('WeightSharedConvolutionalBoxPredictor/'
'ClassPredictionTower/conv2d_0/BatchNorm/feature_0/beta'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'ClassPredictionTower/conv2d_0/BatchNorm/feature_0/moving_mean'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'ClassPredictionTower/conv2d_0/BatchNorm/feature_0/moving_variance'),
('WeightSharedConvolutionalBoxPredictor/'
'ClassPredictionTower/conv2d_0/BatchNorm/feature_1/beta'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'ClassPredictionTower/conv2d_0/BatchNorm/feature_1/moving_mean'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'ClassPredictionTower/conv2d_0/BatchNorm/feature_1/moving_variance'),
('WeightSharedConvolutionalBoxPredictor/'
'ClassPredictionTower/conv2d_1/kernel'),
('WeightSharedConvolutionalBoxPredictor/'
'ClassPredictionTower/conv2d_1/BatchNorm/feature_0/beta'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'ClassPredictionTower/conv2d_1/BatchNorm/feature_0/moving_mean'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'ClassPredictionTower/conv2d_1/BatchNorm/feature_0/moving_variance'),
('WeightSharedConvolutionalBoxPredictor/'
'ClassPredictionTower/conv2d_1/BatchNorm/feature_1/beta'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'ClassPredictionTower/conv2d_1/BatchNorm/feature_1/moving_mean'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'ClassPredictionTower/conv2d_1/BatchNorm/feature_1/moving_variance'),
# Class prediction head
('WeightSharedConvolutionalBoxPredictor/'
'WeightSharedConvolutionalClassHead/ClassPredictor/kernel'),
@@ -502,21 +543,26 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
def test_predictions_multiple_feature_maps_share_weights_without_batchnorm(
self):
+ tf.keras.backend.clear_session()
num_classes_without_background = 6
+ conv_box_predictor = (
+ box_predictor_builder
+ .build_weight_shared_convolutional_keras_box_predictor(
+ is_training=False,
+ num_classes=num_classes_without_background,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ num_predictions_per_location_list=[5, 5],
+ depth=32,
+ num_layers_before_predictor=2,
+ box_code_size=4,
+ apply_batch_norm=False))
+ variables = []
+
def graph_fn(image_features1, image_features2):
- conv_box_predictor = (
- box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor(
- is_training=False,
- num_classes=num_classes_without_background,
- conv_hyperparams=self._build_conv_hyperparams(),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- num_predictions_per_location_list=[5, 5],
- depth=32,
- num_layers_before_predictor=2,
- box_code_size=4,
- apply_batch_norm=False))
box_predictions = conv_box_predictor([image_features1, image_features2])
+ variables.extend(list(conv_box_predictor.variables))
box_encodings = tf.concat(
box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
class_predictions_with_background = tf.concat(
@@ -524,11 +570,11 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
axis=1)
return (box_encodings, class_predictions_with_background)
- with self.test_session(graph=tf.Graph()):
- graph_fn(tf.random_uniform([4, 32, 32, 3], dtype=tf.float32),
- tf.random_uniform([4, 16, 16, 3], dtype=tf.float32))
- actual_variable_set = set(
- [var.op.name for var in tf.trainable_variables()])
+ self.execute(graph_fn, [
+ np.random.rand(4, 32, 32, 3).astype(np.float32),
+ np.random.rand(4, 16, 16, 3).astype(np.float32)
+ ])
+ actual_variable_set = set([var.name.split(':')[0] for var in variables])
expected_variable_set = set([
# Box prediction tower
('WeightSharedConvolutionalBoxPredictor/'
@@ -562,23 +608,27 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
def test_predictions_multiple_feature_maps_share_weights_with_depthwise(
self):
+ tf.keras.backend.clear_session()
num_classes_without_background = 6
+ conv_box_predictor = (
+ box_predictor_builder
+ .build_weight_shared_convolutional_keras_box_predictor(
+ is_training=False,
+ num_classes=num_classes_without_background,
+ conv_hyperparams=self._build_conv_hyperparams(add_batch_norm=False),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ num_predictions_per_location_list=[5, 5],
+ depth=32,
+ num_layers_before_predictor=2,
+ box_code_size=4,
+ apply_batch_norm=False,
+ use_depthwise=True))
+ variables = []
+
def graph_fn(image_features1, image_features2):
- conv_box_predictor = (
- box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor(
- is_training=False,
- num_classes=num_classes_without_background,
- conv_hyperparams=self._build_conv_hyperparams(
- add_batch_norm=False),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- num_predictions_per_location_list=[5, 5],
- depth=32,
- num_layers_before_predictor=2,
- box_code_size=4,
- apply_batch_norm=False,
- use_depthwise=True))
box_predictions = conv_box_predictor([image_features1, image_features2])
+ variables.extend(list(conv_box_predictor.variables))
box_encodings = tf.concat(
box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
class_predictions_with_background = tf.concat(
@@ -586,11 +636,11 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
axis=1)
return (box_encodings, class_predictions_with_background)
- with self.test_session(graph=tf.Graph()):
- graph_fn(tf.random_uniform([4, 32, 32, 3], dtype=tf.float32),
- tf.random_uniform([4, 16, 16, 3], dtype=tf.float32))
- actual_variable_set = set(
- [var.op.name for var in tf.trainable_variables()])
+ self.execute(graph_fn, [
+ np.random.rand(4, 32, 32, 3).astype(np.float32),
+ np.random.rand(4, 16, 16, 3).astype(np.float32)
+ ])
+ actual_variable_set = set([var.name.split(':')[0] for var in variables])
expected_variable_set = set([
# Box prediction tower
('WeightSharedConvolutionalBoxPredictor/'
@@ -635,23 +685,27 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
self.assertEqual(expected_variable_set, actual_variable_set)
def test_no_batchnorm_params_when_batchnorm_is_not_configured(self):
+ tf.keras.backend.clear_session()
num_classes_without_background = 6
+ conv_box_predictor = (
+ box_predictor_builder
+ .build_weight_shared_convolutional_keras_box_predictor(
+ is_training=False,
+ num_classes=num_classes_without_background,
+ conv_hyperparams=self._build_conv_hyperparams(add_batch_norm=False),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ num_predictions_per_location_list=[5, 5],
+ depth=32,
+ num_layers_before_predictor=2,
+ box_code_size=4,
+ apply_batch_norm=False))
+ variables = []
+
def graph_fn(image_features1, image_features2):
- conv_box_predictor = (
- box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor(
- is_training=False,
- num_classes=num_classes_without_background,
- conv_hyperparams=self._build_conv_hyperparams(
- add_batch_norm=False),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- num_predictions_per_location_list=[5, 5],
- depth=32,
- num_layers_before_predictor=2,
- box_code_size=4,
- apply_batch_norm=False))
box_predictions = conv_box_predictor(
[image_features1, image_features2])
+ variables.extend(list(conv_box_predictor.variables))
box_encodings = tf.concat(
box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
class_predictions_with_background = tf.concat(
@@ -659,11 +713,11 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
axis=1)
return (box_encodings, class_predictions_with_background)
- with self.test_session(graph=tf.Graph()):
- graph_fn(tf.random_uniform([4, 32, 32, 3], dtype=tf.float32),
- tf.random_uniform([4, 16, 16, 3], dtype=tf.float32))
- actual_variable_set = set(
- [var.op.name for var in tf.trainable_variables()])
+ self.execute(graph_fn, [
+ np.random.rand(4, 32, 32, 3).astype(np.float32),
+ np.random.rand(4, 16, 16, 3).astype(np.float32)
+ ])
+ actual_variable_set = set([var.name.split(':')[0] for var in variables])
expected_variable_set = set([
# Box prediction tower
('WeightSharedConvolutionalBoxPredictor/'
@@ -697,22 +751,27 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
def test_predictions_share_weights_share_tower_separate_batchnorm(
self):
+ tf.keras.backend.clear_session()
num_classes_without_background = 6
+ conv_box_predictor = (
+ box_predictor_builder
+ .build_weight_shared_convolutional_keras_box_predictor(
+ is_training=False,
+ num_classes=num_classes_without_background,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ num_predictions_per_location_list=[5, 5],
+ depth=32,
+ num_layers_before_predictor=2,
+ box_code_size=4,
+ share_prediction_tower=True))
+ variables = []
+
def graph_fn(image_features1, image_features2):
- conv_box_predictor = (
- box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor(
- is_training=False,
- num_classes=num_classes_without_background,
- conv_hyperparams=self._build_conv_hyperparams(),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- num_predictions_per_location_list=[5, 5],
- depth=32,
- num_layers_before_predictor=2,
- box_code_size=4,
- share_prediction_tower=True))
box_predictions = conv_box_predictor(
[image_features1, image_features2])
+ variables.extend(list(conv_box_predictor.variables))
box_encodings = tf.concat(
box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
class_predictions_with_background = tf.concat(
@@ -720,11 +779,11 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
axis=1)
return (box_encodings, class_predictions_with_background)
- with self.test_session(graph=tf.Graph()):
- graph_fn(tf.random_uniform([4, 32, 32, 3], dtype=tf.float32),
- tf.random_uniform([4, 16, 16, 3], dtype=tf.float32))
- actual_variable_set = set(
- [var.op.name for var in tf.trainable_variables()])
+ self.execute(graph_fn, [
+ np.random.rand(4, 32, 32, 3).astype(np.float32),
+ np.random.rand(4, 16, 16, 3).astype(np.float32)
+ ])
+ actual_variable_set = set([var.name.split(':')[0] for var in variables])
expected_variable_set = set([
# Shared prediction tower
('WeightSharedConvolutionalBoxPredictor/'
@@ -733,12 +792,28 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
'PredictionTower/conv2d_0/BatchNorm/feature_0/beta'),
('WeightSharedConvolutionalBoxPredictor/'
'PredictionTower/conv2d_0/BatchNorm/feature_1/beta'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'PredictionTower/conv2d_0/BatchNorm/feature_0/moving_mean'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'PredictionTower/conv2d_0/BatchNorm/feature_1/moving_mean'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'PredictionTower/conv2d_0/BatchNorm/feature_0/moving_variance'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'PredictionTower/conv2d_0/BatchNorm/feature_1/moving_variance'),
('WeightSharedConvolutionalBoxPredictor/'
'PredictionTower/conv2d_1/kernel'),
('WeightSharedConvolutionalBoxPredictor/'
'PredictionTower/conv2d_1/BatchNorm/feature_0/beta'),
('WeightSharedConvolutionalBoxPredictor/'
'PredictionTower/conv2d_1/BatchNorm/feature_1/beta'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'PredictionTower/conv2d_1/BatchNorm/feature_0/moving_mean'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'PredictionTower/conv2d_1/BatchNorm/feature_1/moving_mean'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'PredictionTower/conv2d_1/BatchNorm/feature_0/moving_variance'),
+ ('WeightSharedConvolutionalBoxPredictor/'
+ 'PredictionTower/conv2d_1/BatchNorm/feature_1/moving_variance'),
# Box prediction head
('WeightSharedConvolutionalBoxPredictor/'
'WeightSharedConvolutionalBoxHead/BoxPredictor/kernel'),
@@ -753,24 +828,28 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
def test_predictions_share_weights_share_tower_without_batchnorm(
self):
+ tf.keras.backend.clear_session()
num_classes_without_background = 6
+ conv_box_predictor = (
+ box_predictor_builder
+ .build_weight_shared_convolutional_keras_box_predictor(
+ is_training=False,
+ num_classes=num_classes_without_background,
+ conv_hyperparams=self._build_conv_hyperparams(add_batch_norm=False),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ num_predictions_per_location_list=[5, 5],
+ depth=32,
+ num_layers_before_predictor=2,
+ box_code_size=4,
+ share_prediction_tower=True,
+ apply_batch_norm=False))
+ variables = []
+
def graph_fn(image_features1, image_features2):
- conv_box_predictor = (
- box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor(
- is_training=False,
- num_classes=num_classes_without_background,
- conv_hyperparams=self._build_conv_hyperparams(
- add_batch_norm=False),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- num_predictions_per_location_list=[5, 5],
- depth=32,
- num_layers_before_predictor=2,
- box_code_size=4,
- share_prediction_tower=True,
- apply_batch_norm=False))
box_predictions = conv_box_predictor(
[image_features1, image_features2])
+ variables.extend(list(conv_box_predictor.variables))
box_encodings = tf.concat(
box_predictions[box_predictor.BOX_ENCODINGS], axis=1)
class_predictions_with_background = tf.concat(
@@ -778,11 +857,11 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
axis=1)
return (box_encodings, class_predictions_with_background)
- with self.test_session(graph=tf.Graph()):
- graph_fn(tf.random_uniform([4, 32, 32, 3], dtype=tf.float32),
- tf.random_uniform([4, 16, 16, 3], dtype=tf.float32))
- actual_variable_set = set(
- [var.op.name for var in tf.trainable_variables()])
+ self.execute(graph_fn, [
+ np.random.rand(4, 32, 32, 3).astype(np.float32),
+ np.random.rand(4, 16, 16, 3).astype(np.float32)
+ ])
+ actual_variable_set = set([var.name.split(':')[0] for var in variables])
expected_variable_set = set([
# Shared prediction tower
('WeightSharedConvolutionalBoxPredictor/'
@@ -806,40 +885,6 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
self.assertEqual(expected_variable_set, actual_variable_set)
- def test_get_predictions_with_feature_maps_of_dynamic_shape(
- self):
- image_features = tf.placeholder(dtype=tf.float32, shape=[4, None, None, 64])
- conv_box_predictor = (
- box_predictor_builder.build_weight_shared_convolutional_keras_box_predictor(
- is_training=False,
- num_classes=0,
- conv_hyperparams=self._build_conv_hyperparams(),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- num_predictions_per_location_list=[5],
- depth=32,
- num_layers_before_predictor=1,
- box_code_size=4))
- box_predictions = conv_box_predictor([image_features])
- box_encodings = tf.concat(box_predictions[box_predictor.BOX_ENCODINGS],
- axis=1)
- objectness_predictions = tf.concat(box_predictions[
- box_predictor.CLASS_PREDICTIONS_WITH_BACKGROUND], axis=1)
- init_op = tf.global_variables_initializer()
-
- resolution = 32
- expected_num_anchors = resolution*resolution*5
- with self.test_session() as sess:
- sess.run(init_op)
- (box_encodings_shape,
- objectness_predictions_shape) = sess.run(
- [tf.shape(box_encodings), tf.shape(objectness_predictions)],
- feed_dict={image_features:
- np.random.rand(4, resolution, resolution, 64)})
- self.assertAllEqual(box_encodings_shape, [4, expected_num_anchors, 4])
- self.assertAllEqual(objectness_predictions_shape,
- [4, expected_num_anchors, 1])
-
def test_other_heads_predictions(self):
box_code_size = 4
num_classes_without_background = 3
@@ -847,37 +892,36 @@ class WeightSharedConvolutionalKerasBoxPredictorTest(test_case.TestCase):
mask_height = 5
mask_width = 5
num_predictions_per_location = 5
-
+ box_prediction_head = keras_box_head.WeightSharedConvolutionalBoxHead(
+ box_code_size=box_code_size,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ num_predictions_per_location=num_predictions_per_location)
+ class_prediction_head = keras_class_head.WeightSharedConvolutionalClassHead(
+ num_class_slots=num_classes_without_background + 1,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ num_predictions_per_location=num_predictions_per_location)
+ other_heads = {
+ other_head_name:
+ keras_mask_head.WeightSharedConvolutionalMaskHead(
+ num_classes=num_classes_without_background,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ num_predictions_per_location=num_predictions_per_location,
+ mask_height=mask_height,
+ mask_width=mask_width)
+ }
+
+ conv_box_predictor = box_predictor.WeightSharedConvolutionalBoxPredictor(
+ is_training=False,
+ num_classes=num_classes_without_background,
+ box_prediction_head=box_prediction_head,
+ class_prediction_head=class_prediction_head,
+ other_heads=other_heads,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False,
+ inplace_batchnorm_update=False,
+ depth=32,
+ num_layers_before_predictor=2)
def graph_fn(image_features):
- box_prediction_head = keras_box_head.WeightSharedConvolutionalBoxHead(
- box_code_size=box_code_size,
- conv_hyperparams=self._build_conv_hyperparams(),
- num_predictions_per_location=num_predictions_per_location)
- class_prediction_head = keras_class_head.WeightSharedConvolutionalClassHead(
- num_class_slots=num_classes_without_background + 1,
- conv_hyperparams=self._build_conv_hyperparams(),
- num_predictions_per_location=num_predictions_per_location)
- other_heads = {
- other_head_name:
- keras_mask_head.WeightSharedConvolutionalMaskHead(
- num_classes=num_classes_without_background,
- conv_hyperparams=self._build_conv_hyperparams(),
- num_predictions_per_location=num_predictions_per_location,
- mask_height=mask_height,
- mask_width=mask_width)
- }
-
- conv_box_predictor = box_predictor.WeightSharedConvolutionalBoxPredictor(
- is_training=False,
- num_classes=num_classes_without_background,
- box_prediction_head=box_prediction_head,
- class_prediction_head=class_prediction_head,
- other_heads=other_heads,
- conv_hyperparams=self._build_conv_hyperparams(),
- freeze_batchnorm=False,
- inplace_batchnorm_update=False,
- depth=32,
- num_layers_before_predictor=2)
box_predictions = conv_box_predictor([image_features])
for key, value in box_predictions.items():
box_predictions[key] = tf.concat(value, axis=1)
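
Note on the renamed test above: the TF2 version builds the Keras predictor once outside graph_fn, lets graph_fn only run the forward pass and record the predictor's own `.variables`, and drives everything through self.execute. A minimal sketch of that pattern, assuming a toy Dense layer in place of the box predictor and a local `execute` helper standing in for test_case.TestCase.execute (both are illustrative assumptions, not the real test utilities):

import numpy as np
import tensorflow as tf

layer = tf.keras.layers.Dense(4)   # built once, outside graph_fn
variables = []

def graph_fn(features):
  outputs = layer(features)
  variables.extend(list(layer.variables))  # capture variables after the first call
  return outputs

def execute(fn, inputs):
  # Rough stand-in: trace fn as a tf.function and return numpy results.
  return tf.function(fn)(*[tf.constant(x) for x in inputs]).numpy()

out = execute(graph_fn, [np.random.rand(2, 8).astype(np.float32)])
var_names = {v.name.split(':')[0] for v in variables}

Collecting names from the layer's own variables replaces the TF1 pattern of reading tf.trainable_variables() from the default graph, which is why the expected-variable sets in the hunks above now also list the batch-norm moving_mean / moving_variance variables.
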
diff --git a/research/object_detection/predictors/heads/box_head_test.py b/research/object_detection/predictors/heads/box_head_tf1_test.py
similarity index 94%
rename from research/object_detection/predictors/heads/box_head_test.py
rename to research/object_detection/predictors/heads/box_head_tf1_test.py
index dd69115e8ce997bf8ce9d4c3f90a3cc060763456..ab534a2bd029abed5f39e232d023a27dd2e9a361 100644
--- a/research/object_detection/predictors/heads/box_head_test.py
+++ b/research/object_detection/predictors/heads/box_head_tf1_test.py
@@ -14,6 +14,7 @@
# ==============================================================================
"""Tests for object_detection.predictors.heads.box_head."""
+import unittest
import tensorflow.compat.v1 as tf
from google.protobuf import text_format
@@ -21,8 +22,10 @@ from object_detection.builders import hyperparams_builder
from object_detection.predictors.heads import box_head
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class MaskRCNNBoxHeadTest(test_case.TestCase):
def _build_arg_scope_with_hyperparams(self,
@@ -59,6 +62,7 @@ class MaskRCNNBoxHeadTest(test_case.TestCase):
self.assertAllEqual([64, 1, 20, 4], prediction.get_shape().as_list())
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class ConvolutionalBoxPredictorTest(test_case.TestCase):
def _build_arg_scope_with_hyperparams(
@@ -92,6 +96,7 @@ class ConvolutionalBoxPredictorTest(test_case.TestCase):
self.assertAllEqual([64, 323, 1, 4], box_encodings.get_shape().as_list())
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class WeightSharedConvolutionalBoxPredictorTest(test_case.TestCase):
def _build_arg_scope_with_hyperparams(
diff --git a/research/object_detection/predictors/heads/class_head_test.py b/research/object_detection/predictors/heads/class_head_tf1_test.py
similarity index 96%
rename from research/object_detection/predictors/heads/class_head_test.py
rename to research/object_detection/predictors/heads/class_head_tf1_test.py
index eaadcdc39f4bd147b8d141eb99afd42f6cc3da36..3dc8fb120cb9a4c19ff2d595d31dc3645f6e06d0 100644
--- a/research/object_detection/predictors/heads/class_head_test.py
+++ b/research/object_detection/predictors/heads/class_head_tf1_test.py
@@ -14,6 +14,7 @@
# ==============================================================================
"""Tests for object_detection.predictors.heads.class_head."""
+import unittest
import tensorflow.compat.v1 as tf
from google.protobuf import text_format
@@ -21,8 +22,10 @@ from object_detection.builders import hyperparams_builder
from object_detection.predictors.heads import class_head
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class MaskRCNNClassHeadTest(test_case.TestCase):
def _build_arg_scope_with_hyperparams(self,
@@ -81,6 +84,7 @@ class MaskRCNNClassHeadTest(test_case.TestCase):
self.assertSetEqual(expected_var_names, actual_variable_set)
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class ConvolutionalClassPredictorTest(test_case.TestCase):
def _build_arg_scope_with_hyperparams(
@@ -140,6 +144,7 @@ class ConvolutionalClassPredictorTest(test_case.TestCase):
self.assertSetEqual(expected_var_names, actual_variable_set)
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class WeightSharedConvolutionalClassPredictorTest(test_case.TestCase):
def _build_arg_scope_with_hyperparams(
diff --git a/research/object_detection/predictors/heads/keras_box_head_test.py b/research/object_detection/predictors/heads/keras_box_head_tf2_test.py
similarity index 67%
rename from research/object_detection/predictors/heads/keras_box_head_test.py
rename to research/object_detection/predictors/heads/keras_box_head_tf2_test.py
index 1dcf7ce36bd5938b53a1366ea70b86d16602f18f..e9e8b8dcc3aa07ce6917a881c42cf51db7318576 100644
--- a/research/object_detection/predictors/heads/keras_box_head_test.py
+++ b/research/object_detection/predictors/heads/keras_box_head_tf2_test.py
@@ -14,6 +14,7 @@
# ==============================================================================
"""Tests for object_detection.predictors.heads.box_head."""
+import unittest
import tensorflow.compat.v1 as tf
from google.protobuf import text_format
@@ -21,8 +22,10 @@ from object_detection.builders import hyperparams_builder
from object_detection.predictors.heads import keras_box_head
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class ConvolutionalKerasBoxHeadTest(test_case.TestCase):
def _build_conv_hyperparams(self):
@@ -51,10 +54,13 @@ class ConvolutionalKerasBoxHeadTest(test_case.TestCase):
freeze_batchnorm=False,
num_predictions_per_location=1,
use_depthwise=False)
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- box_encodings = box_prediction_head(image_feature)
- self.assertAllEqual([64, 323, 1, 4], box_encodings.get_shape().as_list())
+ def graph_fn():
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ box_encodings = box_prediction_head(image_feature)
+ return box_encodings
+ box_encodings = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 323, 1, 4], box_encodings.shape)
def test_prediction_size_depthwise_true(self):
conv_hyperparams = self._build_conv_hyperparams()
@@ -66,12 +72,16 @@ class ConvolutionalKerasBoxHeadTest(test_case.TestCase):
freeze_batchnorm=False,
num_predictions_per_location=1,
use_depthwise=True)
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- box_encodings = box_prediction_head(image_feature)
- self.assertAllEqual([64, 323, 1, 4], box_encodings.get_shape().as_list())
+ def graph_fn():
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ box_encodings = box_prediction_head(image_feature)
+ return box_encodings
+ box_encodings = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 323, 1, 4], box_encodings.shape)
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class MaskRCNNKerasBoxHeadTest(test_case.TestCase):
def _build_fc_hyperparams(
@@ -102,12 +112,16 @@ class MaskRCNNKerasBoxHeadTest(test_case.TestCase):
dropout_keep_prob=0.5,
box_code_size=4,
share_box_across_classes=False)
- roi_pooled_features = tf.random_uniform(
- [64, 7, 7, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- prediction = box_prediction_head(roi_pooled_features)
- self.assertAllEqual([64, 1, 20, 4], prediction.get_shape().as_list())
+ def graph_fn():
+ roi_pooled_features = tf.random_uniform(
+ [64, 7, 7, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ prediction = box_prediction_head(roi_pooled_features)
+ return prediction
+ prediction = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 1, 20, 4], prediction.shape)
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class WeightSharedConvolutionalKerasBoxHead(test_case.TestCase):
def _build_conv_hyperparams(self):
@@ -133,10 +147,13 @@ class WeightSharedConvolutionalKerasBoxHead(test_case.TestCase):
conv_hyperparams=conv_hyperparams,
num_predictions_per_location=1,
use_depthwise=False)
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- box_encodings = box_prediction_head(image_feature)
- self.assertAllEqual([64, 323, 4], box_encodings.get_shape().as_list())
+ def graph_fn():
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ box_encodings = box_prediction_head(image_feature)
+ return box_encodings
+ box_encodings = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 323, 4], box_encodings.shape)
def test_prediction_size_depthwise_true(self):
conv_hyperparams = self._build_conv_hyperparams()
@@ -145,40 +162,38 @@ class WeightSharedConvolutionalKerasBoxHead(test_case.TestCase):
conv_hyperparams=conv_hyperparams,
num_predictions_per_location=1,
use_depthwise=True)
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- box_encodings = box_prediction_head(image_feature)
- self.assertAllEqual([64, 323, 4], box_encodings.get_shape().as_list())
-
- def test_variable_count_depth_wise_true(self):
- g = tf.Graph()
- with g.as_default():
- conv_hyperparams = self._build_conv_hyperparams()
- box_prediction_head = keras_box_head.WeightSharedConvolutionalBoxHead(
- box_code_size=4,
- conv_hyperparams=conv_hyperparams,
- num_predictions_per_location=1,
- use_depthwise=True)
+ def graph_fn():
image_feature = tf.random_uniform(
[64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- _ = box_prediction_head(image_feature)
- variables = g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
- self.assertEqual(len(variables), 3)
+ box_encodings = box_prediction_head(image_feature)
+ return box_encodings
+ box_encodings = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 323, 4], box_encodings.shape)
+
+ def test_variable_count_depth_wise_true(self):
+ conv_hyperparams = self._build_conv_hyperparams()
+ box_prediction_head = keras_box_head.WeightSharedConvolutionalBoxHead(
+ box_code_size=4,
+ conv_hyperparams=conv_hyperparams,
+ num_predictions_per_location=1,
+ use_depthwise=True)
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ box_prediction_head(image_feature)
+ self.assertEqual(len(box_prediction_head.variables), 3)
def test_variable_count_depth_wise_False(self):
- g = tf.Graph()
- with g.as_default():
- conv_hyperparams = self._build_conv_hyperparams()
- box_prediction_head = keras_box_head.WeightSharedConvolutionalBoxHead(
- box_code_size=4,
- conv_hyperparams=conv_hyperparams,
- num_predictions_per_location=1,
- use_depthwise=False)
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- _ = box_prediction_head(image_feature)
- variables = g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
- self.assertEqual(len(variables), 2)
+ conv_hyperparams = self._build_conv_hyperparams()
+ box_prediction_head = keras_box_head.WeightSharedConvolutionalBoxHead(
+ box_code_size=4,
+ conv_hyperparams=conv_hyperparams,
+ num_predictions_per_location=1,
+ use_depthwise=False)
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ box_prediction_head(image_feature)
+ self.assertEqual(len(box_prediction_head.variables), 2)
+
if __name__ == '__main__':
tf.test.main()
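
Note on the variable-count tests above: the assertions of 3 variables for the depthwise head and 2 for the regular head follow from how a depthwise-separable convolution is parameterized. A sketch using standard Keras layers as analogues (an assumption; the real heads build their convs via the conv_hyperparams builder, but the variable arithmetic is the same):

import tensorflow as tf

x = tf.random.uniform([1, 17, 19, 1024])

plain = tf.keras.layers.Conv2D(4, kernel_size=3, padding='same')
plain(x)
print(len(plain.variables))      # 2: kernel + bias

separable = tf.keras.layers.SeparableConv2D(4, kernel_size=3, padding='same')
separable(x)
print(len(separable.variables))  # 3: depthwise kernel + pointwise kernel + bias
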
diff --git a/research/object_detection/predictors/heads/keras_class_head_test.py b/research/object_detection/predictors/heads/keras_class_head_tf2_test.py
similarity index 66%
rename from research/object_detection/predictors/heads/keras_class_head_test.py
rename to research/object_detection/predictors/heads/keras_class_head_tf2_test.py
index 4a25efc3eed4aa592646de89dc630326691041eb..aa890ce522defb6ec4c97965846e8f20529bc24b 100644
--- a/research/object_detection/predictors/heads/keras_class_head_test.py
+++ b/research/object_detection/predictors/heads/keras_class_head_tf2_test.py
@@ -14,6 +14,7 @@
# ==============================================================================
"""Tests for object_detection.predictors.heads.class_head."""
+import unittest
import tensorflow.compat.v1 as tf
from google.protobuf import text_format
@@ -21,8 +22,10 @@ from object_detection.builders import hyperparams_builder
from object_detection.predictors.heads import keras_class_head
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class ConvolutionalKerasClassPredictorTest(test_case.TestCase):
def _build_conv_hyperparams(self):
@@ -53,11 +56,13 @@ class ConvolutionalKerasClassPredictorTest(test_case.TestCase):
freeze_batchnorm=False,
num_predictions_per_location=1,
use_depthwise=False)
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- class_predictions = class_prediction_head(image_feature,)
- self.assertAllEqual([64, 323, 20],
- class_predictions.get_shape().as_list())
+ def graph_fn():
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ class_predictions = class_prediction_head(image_feature,)
+ return class_predictions
+ class_predictions = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 323, 20], class_predictions.shape)
def test_prediction_size_depthwise_true(self):
conv_hyperparams = self._build_conv_hyperparams()
@@ -71,13 +76,16 @@ class ConvolutionalKerasClassPredictorTest(test_case.TestCase):
freeze_batchnorm=False,
num_predictions_per_location=1,
use_depthwise=True)
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- class_predictions = class_prediction_head(image_feature,)
- self.assertAllEqual([64, 323, 20],
- class_predictions.get_shape().as_list())
+ def graph_fn():
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ class_predictions = class_prediction_head(image_feature,)
+ return class_predictions
+ class_predictions = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 323, 20], class_predictions.shape)
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class MaskRCNNClassHeadTest(test_case.TestCase):
def _build_fc_hyperparams(self,
@@ -106,12 +114,16 @@ class MaskRCNNClassHeadTest(test_case.TestCase):
freeze_batchnorm=False,
use_dropout=True,
dropout_keep_prob=0.5)
- roi_pooled_features = tf.random_uniform(
- [64, 7, 7, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- prediction = class_prediction_head(roi_pooled_features)
- self.assertAllEqual([64, 1, 20], prediction.get_shape().as_list())
+ def graph_fn():
+ roi_pooled_features = tf.random_uniform(
+ [64, 7, 7, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ prediction = class_prediction_head(roi_pooled_features)
+ return prediction
+ prediction = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 1, 20], prediction.shape)
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class WeightSharedConvolutionalKerasClassPredictorTest(test_case.TestCase):
def _build_conv_hyperparams(self):
@@ -137,10 +149,13 @@ class WeightSharedConvolutionalKerasClassPredictorTest(test_case.TestCase):
conv_hyperparams=conv_hyperparams,
num_predictions_per_location=1,
use_depthwise=False)
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- class_predictions = class_prediction_head(image_feature)
- self.assertAllEqual([64, 323, 20], class_predictions.get_shape().as_list())
+ def graph_fn():
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ class_predictions = class_prediction_head(image_feature)
+ return class_predictions
+ class_predictions = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 323, 20], class_predictions.shape)
def test_prediction_size_depthwise_true(self):
conv_hyperparams = self._build_conv_hyperparams()
@@ -149,42 +164,39 @@ class WeightSharedConvolutionalKerasClassPredictorTest(test_case.TestCase):
conv_hyperparams=conv_hyperparams,
num_predictions_per_location=1,
use_depthwise=True)
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- class_predictions = class_prediction_head(image_feature)
- self.assertAllEqual([64, 323, 20], class_predictions.get_shape().as_list())
-
- def test_variable_count_depth_wise_true(self):
- g = tf.Graph()
- with g.as_default():
- conv_hyperparams = self._build_conv_hyperparams()
- class_prediction_head = (
- keras_class_head.WeightSharedConvolutionalClassHead(
- num_class_slots=20,
- conv_hyperparams=conv_hyperparams,
- num_predictions_per_location=1,
- use_depthwise=True))
+ def graph_fn():
image_feature = tf.random_uniform(
[64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- _ = class_prediction_head(image_feature)
- variables = g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
- self.assertEqual(len(variables), 3)
+ class_predictions = class_prediction_head(image_feature)
+ return class_predictions
+ class_predictions = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 323, 20], class_predictions.shape)
+
+ def test_variable_count_depth_wise_true(self):
+ conv_hyperparams = self._build_conv_hyperparams()
+ class_prediction_head = (
+ keras_class_head.WeightSharedConvolutionalClassHead(
+ num_class_slots=20,
+ conv_hyperparams=conv_hyperparams,
+ num_predictions_per_location=1,
+ use_depthwise=True))
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ class_prediction_head(image_feature)
+ self.assertEqual(len(class_prediction_head.variables), 3)
def test_variable_count_depth_wise_False(self):
- g = tf.Graph()
- with g.as_default():
- conv_hyperparams = self._build_conv_hyperparams()
- class_prediction_head = (
- keras_class_head.WeightSharedConvolutionalClassHead(
- num_class_slots=20,
- conv_hyperparams=conv_hyperparams,
- num_predictions_per_location=1,
- use_depthwise=False))
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- _ = class_prediction_head(image_feature)
- variables = g.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
- self.assertEqual(len(variables), 2)
+ conv_hyperparams = self._build_conv_hyperparams()
+ class_prediction_head = (
+ keras_class_head.WeightSharedConvolutionalClassHead(
+ num_class_slots=20,
+ conv_hyperparams=conv_hyperparams,
+ num_predictions_per_location=1,
+ use_depthwise=False))
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ class_prediction_head(image_feature)
+ self.assertEqual(len(class_prediction_head.variables), 2)
if __name__ == '__main__':
diff --git a/research/object_detection/predictors/heads/keras_mask_head_test.py b/research/object_detection/predictors/heads/keras_mask_head_tf2_test.py
similarity index 67%
rename from research/object_detection/predictors/heads/keras_mask_head_test.py
rename to research/object_detection/predictors/heads/keras_mask_head_tf2_test.py
index 4cdce7a1c5bb629631d6c1aada0eefe14f9c81a5..5465be06fe1fe5150c8c4c3583bfcd3be5c5d079 100644
--- a/research/object_detection/predictors/heads/keras_mask_head_test.py
+++ b/research/object_detection/predictors/heads/keras_mask_head_tf2_test.py
@@ -14,6 +14,7 @@
# ==============================================================================
"""Tests for object_detection.predictors.heads.mask_head."""
+import unittest
import tensorflow.compat.v1 as tf
from google.protobuf import text_format
@@ -21,8 +22,10 @@ from object_detection.builders import hyperparams_builder
from object_detection.predictors.heads import keras_mask_head
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class ConvolutionalMaskPredictorTest(test_case.TestCase):
def _build_conv_hyperparams(self):
@@ -55,11 +58,13 @@ class ConvolutionalMaskPredictorTest(test_case.TestCase):
use_depthwise=False,
mask_height=7,
mask_width=7)
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- mask_predictions = mask_prediction_head(image_feature)
- self.assertAllEqual([64, 323, 20, 7, 7],
- mask_predictions.get_shape().as_list())
+ def graph_fn():
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ mask_predictions = mask_prediction_head(image_feature)
+ return mask_predictions
+ mask_predictions = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 323, 20, 7, 7], mask_predictions.shape)
def test_prediction_size_use_depthwise_true(self):
conv_hyperparams = self._build_conv_hyperparams()
@@ -75,11 +80,13 @@ class ConvolutionalMaskPredictorTest(test_case.TestCase):
use_depthwise=True,
mask_height=7,
mask_width=7)
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- mask_predictions = mask_prediction_head(image_feature)
- self.assertAllEqual([64, 323, 20, 7, 7],
- mask_predictions.get_shape().as_list())
+ def graph_fn():
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ mask_predictions = mask_prediction_head(image_feature)
+ return mask_predictions
+ mask_predictions = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 323, 20, 7, 7], mask_predictions.shape)
def test_class_agnostic_prediction_size_use_depthwise_false(self):
conv_hyperparams = self._build_conv_hyperparams()
@@ -96,11 +103,13 @@ class ConvolutionalMaskPredictorTest(test_case.TestCase):
mask_height=7,
mask_width=7,
masks_are_class_agnostic=True)
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- mask_predictions = mask_prediction_head(image_feature)
- self.assertAllEqual([64, 323, 1, 7, 7],
- mask_predictions.get_shape().as_list())
+ def graph_fn():
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ mask_predictions = mask_prediction_head(image_feature)
+ return mask_predictions
+ mask_predictions = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 323, 1, 7, 7], mask_predictions.shape)
def test_class_agnostic_prediction_size_use_depthwise_true(self):
conv_hyperparams = self._build_conv_hyperparams()
@@ -117,13 +126,16 @@ class ConvolutionalMaskPredictorTest(test_case.TestCase):
mask_height=7,
mask_width=7,
masks_are_class_agnostic=True)
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- mask_predictions = mask_prediction_head(image_feature)
- self.assertAllEqual([64, 323, 1, 7, 7],
- mask_predictions.get_shape().as_list())
+ def graph_fn():
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ mask_predictions = mask_prediction_head(image_feature)
+ return mask_predictions
+ mask_predictions = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 323, 1, 7, 7], mask_predictions.shape)
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class MaskRCNNMaskHeadTest(test_case.TestCase):
def _build_conv_hyperparams(self,
@@ -155,10 +167,13 @@ class MaskRCNNMaskHeadTest(test_case.TestCase):
mask_prediction_num_conv_layers=2,
mask_prediction_conv_depth=256,
masks_are_class_agnostic=False)
- roi_pooled_features = tf.random_uniform(
- [64, 7, 7, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- prediction = mask_prediction_head(roi_pooled_features)
- self.assertAllEqual([64, 1, 20, 14, 14], prediction.get_shape().as_list())
+ def graph_fn():
+ roi_pooled_features = tf.random_uniform(
+ [64, 7, 7, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ prediction = mask_prediction_head(roi_pooled_features)
+ return prediction
+ prediction = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 1, 20, 14, 14], prediction.shape)
def test_prediction_size_with_convolve_then_upsample(self):
mask_prediction_head = keras_mask_head.MaskRCNNMaskHead(
@@ -172,12 +187,16 @@ class MaskRCNNMaskHeadTest(test_case.TestCase):
mask_prediction_conv_depth=256,
masks_are_class_agnostic=True,
convolve_then_upsample=True)
- roi_pooled_features = tf.random_uniform(
- [64, 14, 14, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- prediction = mask_prediction_head(roi_pooled_features)
- self.assertAllEqual([64, 1, 1, 28, 28], prediction.get_shape().as_list())
+ def graph_fn():
+ roi_pooled_features = tf.random_uniform(
+ [64, 14, 14, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ prediction = mask_prediction_head(roi_pooled_features)
+ return prediction
+ prediction = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 1, 1, 28, 28], prediction.shape)
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class WeightSharedConvolutionalMaskPredictorTest(test_case.TestCase):
def _build_conv_hyperparams(self):
@@ -204,11 +223,13 @@ class WeightSharedConvolutionalMaskPredictorTest(test_case.TestCase):
conv_hyperparams=self._build_conv_hyperparams(),
mask_height=7,
mask_width=7))
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- mask_predictions = mask_prediction_head(image_feature)
- self.assertAllEqual([64, 323, 20, 7, 7],
- mask_predictions.get_shape().as_list())
+ def graph_fn():
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ mask_predictions = mask_prediction_head(image_feature)
+ return mask_predictions
+ mask_predictions = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 323, 20, 7, 7], mask_predictions.shape)
def test_class_agnostic_prediction_size(self):
mask_prediction_head = (
@@ -219,11 +240,13 @@ class WeightSharedConvolutionalMaskPredictorTest(test_case.TestCase):
mask_height=7,
mask_width=7,
masks_are_class_agnostic=True))
- image_feature = tf.random_uniform(
- [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
- mask_predictions = mask_prediction_head(image_feature)
- self.assertAllEqual([64, 323, 1, 7, 7],
- mask_predictions.get_shape().as_list())
+ def graph_fn():
+ image_feature = tf.random_uniform(
+ [64, 17, 19, 1024], minval=-10.0, maxval=10.0, dtype=tf.float32)
+ mask_predictions = mask_prediction_head(image_feature)
+ return mask_predictions
+ mask_predictions = self.execute(graph_fn, [])
+ self.assertAllEqual([64, 323, 1, 7, 7], mask_predictions.shape)
if __name__ == '__main__':
tf.test.main()
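
The rewritten TF2 tests above all share one pattern: the Keras head (or predictor) is constructed once in the test body, the random inputs and forward pass move into a local graph_fn, and test_case.TestCase.execute runs that function in eager or graph mode and returns numpy arrays, so the static get_shape().as_list() assertions become plain .shape checks. The same motivation drives the box-predictor test changes further below, where predictor construction is hoisted out of graph_fn so variables are created only once. A minimal sketch of the pattern, using a hypothetical DummyHead layer in place of the real prediction heads:

# Sketch only: DummyHead stands in for keras_mask_head / keras_class_head.
import tensorflow.compat.v1 as tf

from object_detection.utils import test_case


class DummyHead(tf.keras.layers.Layer):

  def call(self, features):
    # Collapse the spatial dimensions, mimicking a per-location head.
    return tf.reduce_mean(features, axis=[1, 2])


class DummyHeadTest(test_case.TestCase):

  def test_prediction_shape(self):
    head = DummyHead()  # Built once, outside graph_fn.

    def graph_fn():
      image_feature = tf.random_uniform(
          [8, 17, 19, 64], minval=-10.0, maxval=10.0, dtype=tf.float32)
      return head(image_feature)

    predictions = self.execute(graph_fn, [])
    # execute() returns a numpy array, so `.shape` replaces
    # `.get_shape().as_list()` from the TF1 version of these tests.
    self.assertAllEqual([8, 64], predictions.shape)
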
diff --git a/research/object_detection/predictors/heads/keypoint_head_test.py b/research/object_detection/predictors/heads/keypoint_head_tf1_test.py
similarity index 94%
rename from research/object_detection/predictors/heads/keypoint_head_test.py
rename to research/object_detection/predictors/heads/keypoint_head_tf1_test.py
index 0dc4c6f7307000051850d156a3974f8e51415b76..828174989133fd2ec6552ad848985719bdae35a5 100644
--- a/research/object_detection/predictors/heads/keypoint_head_test.py
+++ b/research/object_detection/predictors/heads/keypoint_head_tf1_test.py
@@ -14,6 +14,7 @@
# ==============================================================================
"""Tests for object_detection.predictors.heads.keypoint_head."""
+import unittest
import tensorflow.compat.v1 as tf
from google.protobuf import text_format
@@ -21,8 +22,10 @@ from object_detection.builders import hyperparams_builder
from object_detection.predictors.heads import keypoint_head
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class MaskRCNNKeypointHeadTest(test_case.TestCase):
def _build_arg_scope_with_hyperparams(self,
diff --git a/research/object_detection/predictors/heads/mask_head_test.py b/research/object_detection/predictors/heads/mask_head_tf1_test.py
similarity index 96%
rename from research/object_detection/predictors/heads/mask_head_test.py
rename to research/object_detection/predictors/heads/mask_head_tf1_test.py
index d3bd6819d3cb5499962028153964ee853eb147a5..152394836135abeaa68f32dd48275a89347d4059 100644
--- a/research/object_detection/predictors/heads/mask_head_test.py
+++ b/research/object_detection/predictors/heads/mask_head_tf1_test.py
@@ -14,6 +14,7 @@
# ==============================================================================
"""Tests for object_detection.predictors.heads.mask_head."""
+import unittest
import tensorflow.compat.v1 as tf
from google.protobuf import text_format
@@ -21,8 +22,10 @@ from object_detection.builders import hyperparams_builder
from object_detection.predictors.heads import mask_head
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class MaskRCNNMaskHeadTest(test_case.TestCase):
def _build_arg_scope_with_hyperparams(self,
@@ -75,6 +78,7 @@ class MaskRCNNMaskHeadTest(test_case.TestCase):
self.assertAllEqual([64, 1, 1, 28, 28], prediction.get_shape().as_list())
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class ConvolutionalMaskPredictorTest(test_case.TestCase):
def _build_arg_scope_with_hyperparams(
@@ -131,6 +135,7 @@ class ConvolutionalMaskPredictorTest(test_case.TestCase):
mask_predictions.get_shape().as_list())
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class WeightSharedConvolutionalMaskPredictorTest(test_case.TestCase):
def _build_arg_scope_with_hyperparams(
diff --git a/research/object_detection/predictors/mask_rcnn_box_predictor_test.py b/research/object_detection/predictors/mask_rcnn_box_predictor_tf1_test.py
similarity index 97%
rename from research/object_detection/predictors/mask_rcnn_box_predictor_test.py
rename to research/object_detection/predictors/mask_rcnn_box_predictor_tf1_test.py
index 4733e7a5f4951e808d5f34f0d98f402b98cf9904..d9a4bcbbf004dedc670956baf05615358e33e1a1 100644
--- a/research/object_detection/predictors/mask_rcnn_box_predictor_test.py
+++ b/research/object_detection/predictors/mask_rcnn_box_predictor_tf1_test.py
@@ -14,6 +14,7 @@
# ==============================================================================
"""Tests for object_detection.predictors.mask_rcnn_box_predictor."""
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
@@ -23,8 +24,10 @@ from object_detection.builders import hyperparams_builder
from object_detection.predictors import mask_rcnn_box_predictor as box_predictor
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class MaskRCNNBoxPredictorTest(test_case.TestCase):
def _build_arg_scope_with_hyperparams(self,
diff --git a/research/object_detection/predictors/mask_rcnn_keras_box_predictor_test.py b/research/object_detection/predictors/mask_rcnn_keras_box_predictor_tf2_test.py
similarity index 76%
rename from research/object_detection/predictors/mask_rcnn_keras_box_predictor_test.py
rename to research/object_detection/predictors/mask_rcnn_keras_box_predictor_tf2_test.py
index fbffe44e29180ae5a11106b7fa88679e638529fe..a92db9e90fb8299ff449bb614886a9a5542033c3 100644
--- a/research/object_detection/predictors/mask_rcnn_keras_box_predictor_test.py
+++ b/research/object_detection/predictors/mask_rcnn_keras_box_predictor_tf2_test.py
@@ -14,6 +14,7 @@
# ==============================================================================
"""Tests for object_detection.predictors.mask_rcnn_box_predictor."""
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
@@ -23,8 +24,10 @@ from object_detection.builders import hyperparams_builder
from object_detection.predictors import mask_rcnn_keras_box_predictor as box_predictor
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class MaskRCNNKerasBoxPredictorTest(test_case.TestCase):
def _build_hyperparams(self,
@@ -46,17 +49,17 @@ class MaskRCNNKerasBoxPredictorTest(test_case.TestCase):
return hyperparams_builder.KerasLayerHyperparams(hyperparams)
def test_get_boxes_with_five_classes(self):
+ mask_box_predictor = (
+ box_predictor_builder.build_mask_rcnn_keras_box_predictor(
+ is_training=False,
+ num_classes=5,
+ fc_hyperparams=self._build_hyperparams(),
+ freeze_batchnorm=False,
+ use_dropout=False,
+ dropout_keep_prob=0.5,
+ box_code_size=4,
+ ))
def graph_fn(image_features):
- mask_box_predictor = (
- box_predictor_builder.build_mask_rcnn_keras_box_predictor(
- is_training=False,
- num_classes=5,
- fc_hyperparams=self._build_hyperparams(),
- freeze_batchnorm=False,
- use_dropout=False,
- dropout_keep_prob=0.5,
- box_code_size=4,
- ))
box_predictions = mask_box_predictor(
[image_features],
prediction_stage=2)
@@ -70,18 +73,19 @@ class MaskRCNNKerasBoxPredictorTest(test_case.TestCase):
self.assertAllEqual(class_predictions_with_background.shape, [2, 1, 6])
def test_get_boxes_with_five_classes_share_box_across_classes(self):
+ mask_box_predictor = (
+ box_predictor_builder.build_mask_rcnn_keras_box_predictor(
+ is_training=False,
+ num_classes=5,
+ fc_hyperparams=self._build_hyperparams(),
+ freeze_batchnorm=False,
+ use_dropout=False,
+ dropout_keep_prob=0.5,
+ box_code_size=4,
+ share_box_across_classes=True
+ ))
def graph_fn(image_features):
- mask_box_predictor = (
- box_predictor_builder.build_mask_rcnn_keras_box_predictor(
- is_training=False,
- num_classes=5,
- fc_hyperparams=self._build_hyperparams(),
- freeze_batchnorm=False,
- use_dropout=False,
- dropout_keep_prob=0.5,
- box_code_size=4,
- share_box_across_classes=True
- ))
+
box_predictions = mask_box_predictor(
[image_features],
prediction_stage=2)
@@ -95,19 +99,19 @@ class MaskRCNNKerasBoxPredictorTest(test_case.TestCase):
self.assertAllEqual(class_predictions_with_background.shape, [2, 1, 6])
def test_get_instance_masks(self):
+ mask_box_predictor = (
+ box_predictor_builder.build_mask_rcnn_keras_box_predictor(
+ is_training=False,
+ num_classes=5,
+ fc_hyperparams=self._build_hyperparams(),
+ freeze_batchnorm=False,
+ use_dropout=False,
+ dropout_keep_prob=0.5,
+ box_code_size=4,
+ conv_hyperparams=self._build_hyperparams(
+ op_type=hyperparams_pb2.Hyperparams.CONV),
+ predict_instance_masks=True))
def graph_fn(image_features):
- mask_box_predictor = (
- box_predictor_builder.build_mask_rcnn_keras_box_predictor(
- is_training=False,
- num_classes=5,
- fc_hyperparams=self._build_hyperparams(),
- freeze_batchnorm=False,
- use_dropout=False,
- dropout_keep_prob=0.5,
- box_code_size=4,
- conv_hyperparams=self._build_hyperparams(
- op_type=hyperparams_pb2.Hyperparams.CONV),
- predict_instance_masks=True))
box_predictions = mask_box_predictor(
[image_features],
prediction_stage=3)
diff --git a/research/object_detection/predictors/rfcn_box_predictor_test.py b/research/object_detection/predictors/rfcn_box_predictor_tf1_test.py
similarity index 95%
rename from research/object_detection/predictors/rfcn_box_predictor_test.py
rename to research/object_detection/predictors/rfcn_box_predictor_tf1_test.py
index 7a484c0855742b8d09622eb9c2b2fd6a6b7cede4..555c4b2adeaef6142884adbc5c4e1087084fd884 100644
--- a/research/object_detection/predictors/rfcn_box_predictor_test.py
+++ b/research/object_detection/predictors/rfcn_box_predictor_tf1_test.py
@@ -14,6 +14,7 @@
# ==============================================================================
"""Tests for object_detection.predictors.rfcn_box_predictor."""
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
@@ -22,8 +23,10 @@ from object_detection.builders import hyperparams_builder
from object_detection.predictors import rfcn_box_predictor as box_predictor
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class RfcnBoxPredictorTest(test_case.TestCase):
def _build_arg_scope_with_conv_hyperparams(self):
diff --git a/research/object_detection/predictors/rfcn_keras_box_predictor_test.py b/research/object_detection/predictors/rfcn_keras_box_predictor_tf2_test.py
similarity index 85%
rename from research/object_detection/predictors/rfcn_keras_box_predictor_test.py
rename to research/object_detection/predictors/rfcn_keras_box_predictor_tf2_test.py
index d8cc01e4b4bb0ca7faa1c6145dcc4013c42f7d01..f845068e35b37a9b0d77873fb5adbf59c78450ae 100644
--- a/research/object_detection/predictors/rfcn_keras_box_predictor_test.py
+++ b/research/object_detection/predictors/rfcn_keras_box_predictor_tf2_test.py
@@ -14,6 +14,7 @@
# ==============================================================================
"""Tests for object_detection.predictors.rfcn_box_predictor."""
+import unittest
import numpy as np
import tensorflow.compat.v1 as tf
@@ -22,8 +23,10 @@ from object_detection.builders import hyperparams_builder
from object_detection.predictors import rfcn_keras_box_predictor as box_predictor
from object_detection.protos import hyperparams_pb2
from object_detection.utils import test_case
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class RfcnKerasBoxPredictorTest(test_case.TestCase):
def _build_conv_hyperparams(self):
@@ -42,18 +45,17 @@ class RfcnKerasBoxPredictorTest(test_case.TestCase):
return hyperparams_builder.KerasLayerHyperparams(conv_hyperparams)
def test_get_correct_box_encoding_and_class_prediction_shapes(self):
-
+ rfcn_box_predictor = box_predictor.RfcnKerasBoxPredictor(
+ is_training=False,
+ num_classes=2,
+ conv_hyperparams=self._build_conv_hyperparams(),
+ freeze_batchnorm=False,
+ num_spatial_bins=[3, 3],
+ depth=4,
+ crop_size=[12, 12],
+ box_code_size=4)
def graph_fn(image_features, proposal_boxes):
- rfcn_box_predictor = box_predictor.RfcnKerasBoxPredictor(
- is_training=False,
- num_classes=2,
- conv_hyperparams=self._build_conv_hyperparams(),
- freeze_batchnorm=False,
- num_spatial_bins=[3, 3],
- depth=4,
- crop_size=[12, 12],
- box_code_size=4
- )
+
box_predictions = rfcn_box_predictor(
[image_features],
proposal_boxes=proposal_boxes)
diff --git a/research/object_detection/protos/center_net.proto b/research/object_detection/protos/center_net.proto
new file mode 100644
index 0000000000000000000000000000000000000000..5047c000f3d4ba22d42127b54e61fbb8726429f8
--- /dev/null
+++ b/research/object_detection/protos/center_net.proto
@@ -0,0 +1,203 @@
+syntax = "proto2";
+
+package object_detection.protos;
+
+import "object_detection/protos/image_resizer.proto";
+import "object_detection/protos/losses.proto";
+
+// Configuration for the CenterNet meta architecture from the "Objects as
+// Points" paper [1]
+// [1]: https://arxiv.org/abs/1904.07850
+
+message CenterNet {
+ // Number of classes to predict.
+ optional int32 num_classes = 1;
+
+ // Feature extractor config.
+ optional CenterNetFeatureExtractor feature_extractor = 2;
+
+ // Image resizer for preprocessing the input image.
+ optional ImageResizer image_resizer = 3;
+
+ // Parameters which are related to object detection task.
+ message ObjectDetection {
+ // The original fields are moved to ObjectCenterParams or deleted.
+ reserved 2, 5, 6, 7;
+
+ // Weight of the task loss. The total loss of the model will be the
+ // summation of task losses weighted by the weights.
+ optional float task_loss_weight = 1 [default = 1.0];
+
+ // Weight for the offset localization loss.
+ optional float offset_loss_weight = 3 [default = 1.0];
+
+ // Weight for the height/width localization loss.
+ optional float scale_loss_weight = 4 [default = 0.1];
+
+ // Localization loss configuration for object scale and offset losses.
+ optional LocalizationLoss localization_loss = 8;
+ }
+ optional ObjectDetection object_detection_task = 4;
+
+ // Parameters related to object center prediction. This is required for both
+ // object detection and keypoint estimation tasks.
+ message ObjectCenterParams {
+ // Weight for the object center loss.
+ optional float object_center_loss_weight = 1 [default = 1.0];
+
+ // Classification loss configuration for object center loss.
+ optional ClassificationLoss classification_loss = 2;
+
+    // The initial bias value of the convolution kernel of the class heatmap
+ // prediction head. -2.19 corresponds to predicting foreground with
+ // a probability of 0.1. See "Focal Loss for Dense Object Detection"
+ // at https://arxiv.org/abs/1708.02002.
+ optional float heatmap_bias_init = 3 [default = -2.19];
+
+ // The minimum IOU overlap boxes need to have to not be penalized.
+ optional float min_box_overlap_iou = 4 [default = 0.7];
+
+ // Maximum number of boxes to predict.
+ optional int32 max_box_predictions = 5 [default = 100];
+
+ // If set, loss is only computed for the labeled classes.
+ optional bool use_labeled_classes = 6 [default = false];
+ }
+ optional ObjectCenterParams object_center_params = 5;
+
+  // Path of the file that contains the label map along with the keypoint
+ // information, including the keypoint indices, corresponding labels, and the
+ // corresponding class. The file should be the same one as used in the input
+  // pipeline. Note that a plain-text StringIntLabelMap proto is expected in
+ // this file.
+ // It is required only if the keypoint estimation task is specified.
+ optional string keypoint_label_map_path = 6;
+
+ // Parameters which are related to keypoint estimation task.
+ message KeypointEstimation {
+ // Name of the task, e.g. "human pose". Note that the task name should be
+ // unique to each keypoint task.
+ optional string task_name = 1;
+
+    // Weight of the task loss. The total loss of the model will be the
+ // summation of task losses weighted by the weights.
+ optional float task_loss_weight = 2 [default = 1.0];
+
+ // Loss configuration for keypoint heatmap, offset, regression losses. Note
+ // that the localization loss is used for offset/regression losses and
+ // classification loss is used for heatmap loss.
+ optional Loss loss = 3;
+
+ // The name of the class that contains the keypoints for this task. This is
+ // used to retrieve the corresponding keypoint indices from the label map.
+ // Note that this corresponds to the "name" field, not "display_name".
+ optional string keypoint_class_name = 4;
+
+ // The standard deviation of the Gaussian kernel used to generate the
+    // keypoint heatmap, in units of output image pixels. This provides the
+    // flexibility of using a different Gaussian kernel size for each keypoint
+    // class. Note that if provided, the keypoint standard deviations are
+    // overridden by the values specified here; otherwise, the default value
+    // of 5.0 is used.
+    // TODO(yuhuic): Update the default value once we find the best value.
+    map<string, float> keypoint_label_to_std = 5;
+
+ // Loss weights corresponding to different heads.
+ optional float keypoint_regression_loss_weight = 6 [default = 1.0];
+ optional float keypoint_heatmap_loss_weight = 7 [default = 1.0];
+ optional float keypoint_offset_loss_weight = 8 [default = 1.0];
+
+ // The initial bias value of the convolution kernel of the keypoint heatmap
+ // prediction head. -2.19 corresponds to predicting foreground with
+ // a probability of 0.1. See "Focal Loss for Dense Object Detection"
+ // at https://arxiv.org/abs/1708.02002.
+ optional float heatmap_bias_init = 9 [default = -2.19];
+
+ // The heatmap score threshold for a keypoint to become a valid candidate.
+ optional float keypoint_candidate_score_threshold = 10 [default = 0.1];
+
+ // The maximum number of candidates to retrieve for each keypoint.
+ optional int32 num_candidates_per_keypoint = 11 [default = 100];
+
+ // Max pool kernel size to use to pull off peak score locations in a
+    // neighborhood (independently for each keypoint type).
+ optional int32 peak_max_pool_kernel_size = 12 [default = 3];
+
+ // The default score to use for regressed keypoints that are not
+ // successfully snapped to a nearby candidate.
+ optional float unmatched_keypoint_score = 13 [default = 0.1];
+
+ // The multiplier to expand the bounding boxes (either the provided boxes or
+    // those which tightly cover the regressed keypoints). Note that the new
+ // expanded box for an instance becomes the feasible search window for all
+ // associated keypoints.
+ optional float box_scale = 14 [default = 1.2];
+
+ // The scale parameter that multiplies the largest dimension of a bounding
+ // box. The resulting distance becomes a search radius for candidates in the
+ // vicinity of each regressed keypoint.
+ optional float candidate_search_scale = 15 [default = 0.3];
+
+ // One of ['min_distance', 'score_distance_ratio'] indicating how to select
+ // the keypoint candidate.
+ optional string candidate_ranking_mode = 16 [default = "min_distance"];
+
+ // The radius (in the unit of output pixel) around heatmap peak to assign
+    // the offset targets. If set to 0, then the offset target will only be
+ // assigned to the heatmap peak (same behavior as the original paper).
+ optional int32 offset_peak_radius = 17 [default = 0];
+
+ // Indicates whether to assign offsets for each keypoint channel
+    // separately. If set to False, the output offset target has the shape
+    // [batch_size, out_height, out_width, 2] (same behavior as the original
+    // paper). If set to True, the output offset target has the shape [batch_size,
+ // out_height, out_width, 2 * num_keypoints] (recommended when the
+ // offset_peak_radius is not zero).
+ optional bool per_keypoint_offset = 18 [default = false];
+ }
+ repeated KeypointEstimation keypoint_estimation_task = 7;
+
+ // Parameters which are related to mask estimation task.
+  // Note: Currently, CenterNet supports a weak form of instance segmentation,
+  // where semantic segmentation masks are estimated and then cropped based on
+ // bounding box detections. Therefore, it is possible for the same image
+ // pixel to be assigned to multiple instances.
+ message MaskEstimation {
+    // Weight of the task loss. The total loss of the model will be the
+ // summation of task losses weighted by the weights.
+ optional float task_loss_weight = 1 [default = 1.0];
+
+ // Classification loss configuration for segmentation loss.
+ optional ClassificationLoss classification_loss = 2;
+
+ // Each instance mask (one per detection) is cropped and resized (bilinear
+ // resampling) from the predicted segmentation feature map. After
+ // resampling, the masks are binarized with the provided score threshold.
+ optional int32 mask_height = 4 [default = 256];
+ optional int32 mask_width = 5 [default = 256];
+ optional float score_threshold = 6 [default = 0.5];
+
+    // The initial bias value of the convolution kernel of the class heatmap
+ // prediction head. -2.19 corresponds to predicting foreground with
+ // a probability of 0.1.
+ optional float heatmap_bias_init = 3 [default = -2.19];
+ }
+ optional MaskEstimation mask_estimation_task = 8;
+}
+
+message CenterNetFeatureExtractor {
+ optional string type = 1;
+
+ // Channel means to be subtracted from each image channel. If not specified,
+ // we use a default value of 0.
+ repeated float channel_means = 2;
+
+ // Channel standard deviations. Each channel will be normalized by dividing
+ // it by its standard deviation. If not specified, we use a default value
+ // of 1.
+ repeated float channel_stds = 3;
+
+ // If set, will change channel order to be [blue, green, red]. This can be
+  // useful for compatibility with some pre-trained feature extractors.
+ optional bool bgr_ordering = 4 [default = false];
+}
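
For reference, the new CenterNet message plugs into the model oneof added to model.proto further below in this diff. The following is an illustrative sketch of parsing a minimal configuration; the feature extractor name, the loss choices, and all numeric values are placeholders rather than a recommended setup:

# Illustrative only: parse a minimal CenterNet model config. Field names
# follow center_net.proto above; values and loss types are placeholders.
from google.protobuf import text_format

from object_detection.protos import model_pb2

CONFIG_TEXT = """
center_net {
  num_classes: 90
  feature_extractor {
    type: "resnet_v2_101"  # Placeholder name, not a registered recommendation.
  }
  image_resizer {
    fixed_shape_resizer {
      height: 512
      width: 512
    }
  }
  object_detection_task {
    task_loss_weight: 1.0
    localization_loss {
      weighted_smooth_l1 {
      }
    }
  }
  object_center_params {
    object_center_loss_weight: 1.0
    classification_loss {
      weighted_sigmoid_focal {
        alpha: 0.75
        gamma: 2.0
      }
    }
    min_box_overlap_iou: 0.7
    max_box_predictions: 100
  }
}
"""

model_config = text_format.Parse(CONFIG_TEXT, model_pb2.DetectionModel())
print(model_config.center_net.object_center_params.max_box_predictions)
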
diff --git a/research/object_detection/protos/faster_rcnn.proto b/research/object_detection/protos/faster_rcnn.proto
index 7e06fbcf41986e4272d09c57ae68fd2e50034034..486cc77ea8b156fb54500b0bbf7a01d4b17ac7b6 100644
--- a/research/object_detection/protos/faster_rcnn.proto
+++ b/research/object_detection/protos/faster_rcnn.proto
@@ -188,7 +188,7 @@ message Context {
// Next id: 4
// The maximum number of contextual features per-image, used for padding
- optional int32 max_num_context_features = 1 [default = 8500];
+ optional int32 max_num_context_features = 1 [default = 2000];
// The bottleneck feature dimension of the attention block.
optional int32 attention_bottleneck_dimension = 2 [default = 2048];
diff --git a/research/object_detection/protos/hyperparams.proto b/research/object_detection/protos/hyperparams.proto
index 2b1053877613c3ced70515a559014b6463175f9e..e2fee247ca1303dfdbb9bdb69f187b7520c4e89c 100644
--- a/research/object_detection/protos/hyperparams.proto
+++ b/research/object_detection/protos/hyperparams.proto
@@ -52,6 +52,12 @@ message Hyperparams {
// Whether depthwise convolutions should be regularized. If this parameter is
// NOT set then the conv hyperparams will default to the parent scope.
optional bool regularize_depthwise = 6 [default = false];
+
+ // By default, use_bias is set to False if batch_norm is not None and
+ // batch_norm.center is True. When force_use_bias is set to True, this
+ // behavior will be overridden, and use_bias will be set to True, regardless
+  // of batch norm parameters. Note that this only applies to KerasLayerHyperparams.
+ optional bool force_use_bias = 8 [default = false];
}
// Proto with one-of field for regularizers.
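
An illustrative sketch of the new force_use_bias field, assuming the Keras hyperparams builder maps it to use_bias=True on the generated conv layers (per the comment, this applies only to KerasLayerHyperparams); the remaining values mirror the test fixtures and are placeholders:

# Sketch only: force_use_bias keeps a conv bias even though batch_norm.center
# is true, which would otherwise disable it (assumption based on the proto
# comment above).
from google.protobuf import text_format

from object_detection.builders import hyperparams_builder
from object_detection.protos import hyperparams_pb2

HYPERPARAMS_TEXT = """
  force_use_bias: true
  activation: RELU_6
  regularizer {
    l2_regularizer {
      weight: 0.0004
    }
  }
  initializer {
    truncated_normal_initializer {
      stddev: 0.03
    }
  }
  batch_norm {
    scale: true
    center: true
  }
"""

hyperparams_proto = text_format.Parse(
    HYPERPARAMS_TEXT, hyperparams_pb2.Hyperparams())
keras_hyperparams = hyperparams_builder.KerasLayerHyperparams(hyperparams_proto)
print(keras_hyperparams.params())  # Expected to include use_bias=True.
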
diff --git a/research/object_detection/protos/input_reader.proto b/research/object_detection/protos/input_reader.proto
index 2d9deda117d69031b9acca8047c8a3bc1db2c4a4..27d022532dc14fffc2b8078a500933d44ae5bf68 100644
--- a/research/object_detection/protos/input_reader.proto
+++ b/research/object_detection/protos/input_reader.proto
@@ -31,7 +31,7 @@ enum InputType {
TF_SEQUENCE_EXAMPLE = 2; // TfSequenceExample Input
}
-// Next id: 31
+// Next id: 32
message InputReader {
// Name of input reader. Typically used to describe the dataset that is read
// by this input reader.
@@ -119,6 +119,10 @@ message InputReader {
// Type of instance mask.
optional InstanceMaskType mask_type = 10 [default = NUMERICAL_MASKS];
+ // Whether to load DensePose data. If set, must also set load_instance_masks
+ // to true.
+ optional bool load_dense_pose = 31 [default = false];
+
// Whether to use the display name when decoding examples. This is only used
// when mapping class text strings to integers.
optional bool use_display_name = 17 [default = false];
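
A small illustrative fragment for the new load_dense_pose flag; as the comment above notes, it is only valid together with load_instance_masks, and the paths below are placeholders:

# Illustrative only: DensePose loading requires instance masks as well.
from google.protobuf import text_format

from object_detection.protos import input_reader_pb2

reader_config = text_format.Parse(
    """
    label_map_path: "PATH_TO_BE_CONFIGURED/label_map.pbtxt"
    load_instance_masks: true
    load_dense_pose: true
    tf_record_input_reader {
      input_path: "PATH_TO_BE_CONFIGURED/train.record-?????-of-00100"
    }
    """, input_reader_pb2.InputReader())
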
diff --git a/research/object_detection/protos/model.proto b/research/object_detection/protos/model.proto
index 9333f2df149162b830950a991a18a0155941ca30..4fb6aed0b790e1f5b621a0d0c7c788cb80876c15 100644
--- a/research/object_detection/protos/model.proto
+++ b/research/object_detection/protos/model.proto
@@ -2,6 +2,7 @@ syntax = "proto2";
package object_detection.protos;
+import "object_detection/protos/center_net.proto";
import "object_detection/protos/faster_rcnn.proto";
import "object_detection/protos/ssd.proto";
@@ -17,6 +18,7 @@ message DetectionModel {
// value to a function that builds your model.
ExperimentalModel experimental_model = 3;
+ CenterNet center_net = 4;
}
}
diff --git a/research/object_detection/protos/preprocessor.proto b/research/object_detection/protos/preprocessor.proto
index aa83939f334b3bbd80e54c2ac7f367cb9cbf8869..3201df2bd08d551fda346a54282c866aa9890c28 100644
--- a/research/object_detection/protos/preprocessor.proto
+++ b/research/object_detection/protos/preprocessor.proto
@@ -57,7 +57,8 @@ message NormalizeImage {
optional float target_maxval = 4 [default=1];
}
-// Randomly horizontally flips the image and detections 50% of the time.
+// Randomly horizontally flips the image and detections with the specified
+// probability, defaulting to 50% of the time.
message RandomHorizontalFlip {
// Specifies a mapping from the original keypoint indices to horizontally
// flipped indices. This is used in the event that keypoints are specified,
@@ -71,10 +72,15 @@ message RandomHorizontalFlip {
// keypoint_flip_permutation: 3
// keypoint_flip_permutation: 5
// keypoint_flip_permutation: 4
+  // If nothing is specified, the keypoint order will be maintained.
repeated int32 keypoint_flip_permutation = 1;
+
+ // The probability of running this augmentation for each image.
+ optional float probability = 2 [default=0.5];
}
-// Randomly vertically flips the image and detections 50% of the time.
+// Randomly vertically flips the image and detections with the specified
+// probability, defaulting to 50% of the time.
message RandomVerticalFlip {
// Specifies a mapping from the original keypoint indices to vertically
// flipped indices. This is used in the event that keypoints are specified,
@@ -89,11 +95,23 @@ message RandomVerticalFlip {
// keypoint_flip_permutation: 5
// keypoint_flip_permutation: 4
repeated int32 keypoint_flip_permutation = 1;
+
+ // The probability of running this augmentation for each image.
+ optional float probability = 2 [default=0.5];
}
// Randomly rotates the image and detections by 90 degrees counter-clockwise
-// 50% of the time.
-message RandomRotation90 {}
+// with the specified probability, defaulting to 50% of the time.
+message RandomRotation90 {
+  // Specifies a mapping from the original keypoint indices to their indices
+  // after a 90-degree counter-clockwise rotation. This is used in the event
+  // that keypoints are specified, in which case when the image is rotated the
+  // keypoints might need to be permuted.
+ repeated int32 keypoint_rot_permutation = 1;
+
+ // The probability of running this augmentation for each image.
+ optional float probability = 2 [default=0.5];
+}
// Randomly scales the values of all pixels in the image by some constant value
// between [minval, maxval], then clip the value to a range between [0, 1.0].
@@ -457,7 +475,6 @@ message SSDRandomCropPadFixedAspectRatio {
// Converts class logits to softmax optionally scaling the values by temperature
// first.
message ConvertClassLogitsToSoftmax {
-
// Scale to use on logits before applying softmax.
optional float temperature = 1 [default=1.0];
}
@@ -472,12 +489,10 @@ message RandomSelfConcatImage {
// Apply an Autoaugment policy to the image and bounding boxes.
message AutoAugmentImage {
-
// What AutoAugment policy to apply to the Image
optional string policy_name = 1 [default="v0"];
}
-
// Randomly drops ground truth boxes for a label with some probability.
message DropLabelProbabilistically {
// The label that should be dropped. This corresponds to one of the entries
@@ -487,7 +502,6 @@ message DropLabelProbabilistically {
optional float drop_probability = 2 [default = 1.0];
}
-
//Remap a set of labels to a new label.
message RemapLabels {
// Labels to be remapped.
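
The flip and rotation messages above now accept a per-image probability, and RandomRotation90 gains its own keypoint permutation. An illustrative sketch with placeholder values for a hypothetical four-keypoint schema:

# Sketch only: the permutations and probabilities below are placeholders.
from google.protobuf import text_format

from object_detection.protos import preprocessor_pb2

flip_step = text_format.Parse(
    """
    random_horizontal_flip {
      keypoint_flip_permutation: 1
      keypoint_flip_permutation: 0
      keypoint_flip_permutation: 2
      keypoint_flip_permutation: 3
      probability: 0.3
    }
    """, preprocessor_pb2.PreprocessingStep())

rotate_step = text_format.Parse(
    """
    random_rotation90 {
      keypoint_rot_permutation: 3
      keypoint_rot_permutation: 0
      keypoint_rot_permutation: 1
      keypoint_rot_permutation: 2
      probability: 0.5
    }
    """, preprocessor_pb2.PreprocessingStep())
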
diff --git a/research/object_detection/protos/train.proto b/research/object_detection/protos/train.proto
index 0da8b2ede150f645e44e3d8f8a5ba995703c8712..62d326cdf67c7329ddaa22250a4f2734a4f43066 100644
--- a/research/object_detection/protos/train.proto
+++ b/research/object_detection/protos/train.proto
@@ -59,7 +59,8 @@ message TrainConfig {
// Whether to load all checkpoint vars that match model variable names and
// sizes. This option is only available if `from_detection_checkpoint` is
- // True.
+ // True. This option is *not* supported for TF2 --- setting it to true
+ // will raise an error.
optional bool load_all_detection_checkpoint_vars = 19 [default = false];
// Number of steps to train the DetectionModel for. If 0, will train the model
diff --git a/research/object_detection/samples/configs/context_rcnn_resnet101_snapshot_serengeti.config b/research/object_detection/samples/configs/context_rcnn_resnet101_snapshot_serengeti.config
new file mode 100644
index 0000000000000000000000000000000000000000..8167731c7b72b5598391c65794f9f8c265903a4c
--- /dev/null
+++ b/research/object_detection/samples/configs/context_rcnn_resnet101_snapshot_serengeti.config
@@ -0,0 +1,164 @@
+# Context R-CNN configuration for Snapshot Serengeti Dataset, with sequence
+# example input data with context_features.
+# This model uses attention into contextual features within the Faster R-CNN
+# object detection framework to improve object detection performance.
+# See https://arxiv.org/abs/1912.03538 for more information.
+# Search for "PATH_TO_BE_CONFIGURED" to find the fields that should be
+# configured.
+
+model {
+ faster_rcnn {
+ num_classes: 48
+ image_resizer {
+ fixed_shape_resizer {
+ height: 640
+ width: 640
+ }
+ }
+ feature_extractor {
+ type: "faster_rcnn_resnet101"
+ first_stage_features_stride: 16
+ batch_norm_trainable: true
+ }
+ first_stage_anchor_generator {
+ grid_anchor_generator {
+ height_stride: 16
+ width_stride: 16
+ scales: 0.25
+ scales: 0.5
+ scales: 1.0
+ scales: 2.0
+ aspect_ratios: 0.5
+ aspect_ratios: 1.0
+ aspect_ratios: 2.0
+ }
+ }
+ first_stage_box_predictor_conv_hyperparams {
+ op: CONV
+ regularizer {
+ l2_regularizer {
+ weight: 0.0
+ }
+ }
+ initializer {
+ truncated_normal_initializer {
+ stddev: 0.00999999977648
+ }
+ }
+ }
+ first_stage_nms_score_threshold: 0.0
+ first_stage_nms_iou_threshold: 0.699999988079
+ first_stage_max_proposals: 300
+ first_stage_localization_loss_weight: 2.0
+ first_stage_objectness_loss_weight: 1.0
+ initial_crop_size: 14
+ maxpool_kernel_size: 2
+ maxpool_stride: 2
+ second_stage_box_predictor {
+ mask_rcnn_box_predictor {
+ fc_hyperparams {
+ op: FC
+ regularizer {
+ l2_regularizer {
+ weight: 0.0
+ }
+ }
+ initializer {
+ variance_scaling_initializer {
+ factor: 1.0
+ uniform: true
+ mode: FAN_AVG
+ }
+ }
+ }
+ use_dropout: false
+ dropout_keep_probability: 1.0
+ share_box_across_classes: true
+ }
+ }
+ second_stage_post_processing {
+ batch_non_max_suppression {
+ score_threshold: 0.0
+ iou_threshold: 0.600000023842
+ max_detections_per_class: 100
+ max_total_detections: 300
+ }
+ score_converter: SOFTMAX
+ }
+ second_stage_localization_loss_weight: 2.0
+ second_stage_classification_loss_weight: 1.0
+ use_matmul_crop_and_resize: true
+ clip_anchors_to_image: true
+ use_matmul_gather_in_matcher: true
+ use_static_balanced_label_sampler: true
+ use_static_shapes: true
+ context_config {
+ max_num_context_features: 2000
+ context_feature_length: 2057
+ }
+ }
+}
+train_config {
+ batch_size: 8
+ data_augmentation_options {
+ random_horizontal_flip {
+ }
+ }
+ sync_replicas: true
+ optimizer {
+ momentum_optimizer {
+ learning_rate {
+ manual_step_learning_rate {
+ initial_learning_rate: 0.0
+ schedule {
+ step: 400000
+ learning_rate: 0.002
+ }
+ schedule {
+ step: 500000
+ learning_rate: 0.0002
+ }
+ schedule {
+ step: 600000
+ learning_rate: 0.00002
+ }
+ warmup: true
+ }
+ }
+ momentum_optimizer_value: 0.9
+ }
+ use_moving_average: false
+ }
+ gradient_clipping_by_norm: 10.0
+ fine_tune_checkpoint: "PATH_TO_BE_CONFIGURED/faster_rcnn_resnet101_coco_2018_08_14/model.ckpt"
+ from_detection_checkpoint: true
+ num_steps: 5000000
+ replicas_to_aggregate: 8
+ max_number_of_boxes: 100
+ unpad_groundtruth_tensors: false
+ use_bfloat16: true
+}
+train_input_reader {
+ label_map_path: "PATH_TO_BE_CONFIGURED/ss_label_map.pbtxt"
+ tf_record_input_reader {
+ input_path: "PATH_TO_BE_CONFIGURED/snapshot_serengeti_train-?????-of-?????"
+ }
+ load_context_features: true
+ input_type: TF_SEQUENCE_EXAMPLE
+}
+eval_config {
+ max_evals: 50
+ metrics_set: "coco_detection_metrics"
+ use_moving_averages: false
+ batch_size: 1
+}
+eval_input_reader {
+ label_map_path: "PATH_TO_BE_CONFIGURED/ss_label_map.pbtxt"
+ shuffle: false
+ num_epochs: 1
+ tf_record_input_reader {
+ input_path: "PATH_TO_BE_CONFIGURED/snapshot_serengeti_val-?????-of-?????"
+ }
+ load_context_features: true
+ input_type: TF_SEQUENCE_EXAMPLE
+}
diff --git a/research/object_detection/samples/configs/context_rcnn_resnet101_snapshot_serengeti_sync.config b/research/object_detection/samples/configs/context_rcnn_resnet101_snapshot_serengeti_sync.config
new file mode 100644
index 0000000000000000000000000000000000000000..b96dea467ed600c648219595f33c4b147a0c3215
--- /dev/null
+++ b/research/object_detection/samples/configs/context_rcnn_resnet101_snapshot_serengeti_sync.config
@@ -0,0 +1,166 @@
+# Context R-CNN configuration for Snapshot Serengeti Dataset, with sequence
+# example input data with context_features.
+# This model uses attention into contextual features within the Faster R-CNN
+# object detection framework to improve object detection performance.
+# See https://arxiv.org/abs/1912.03538 for more information.
+# Search for "PATH_TO_BE_CONFIGURED" to find the fields that should be
+# configured.
+
+# This config is TPU compatible.
+
+model {
+ faster_rcnn {
+ num_classes: 48
+ image_resizer {
+ fixed_shape_resizer {
+ height: 640
+ width: 640
+ }
+ }
+ feature_extractor {
+ type: "faster_rcnn_resnet101"
+ first_stage_features_stride: 16
+ batch_norm_trainable: true
+ }
+ first_stage_anchor_generator {
+ grid_anchor_generator {
+ height_stride: 16
+ width_stride: 16
+ scales: 0.25
+ scales: 0.5
+ scales: 1.0
+ scales: 2.0
+ aspect_ratios: 0.5
+ aspect_ratios: 1.0
+ aspect_ratios: 2.0
+ }
+ }
+ first_stage_box_predictor_conv_hyperparams {
+ op: CONV
+ regularizer {
+ l2_regularizer {
+ weight: 0.0
+ }
+ }
+ initializer {
+ truncated_normal_initializer {
+ stddev: 0.00999999977648
+ }
+ }
+ }
+ first_stage_nms_score_threshold: 0.0
+ first_stage_nms_iou_threshold: 0.699999988079
+ first_stage_max_proposals: 300
+ first_stage_localization_loss_weight: 2.0
+ first_stage_objectness_loss_weight: 1.0
+ initial_crop_size: 14
+ maxpool_kernel_size: 2
+ maxpool_stride: 2
+ second_stage_box_predictor {
+ mask_rcnn_box_predictor {
+ fc_hyperparams {
+ op: FC
+ regularizer {
+ l2_regularizer {
+ weight: 0.0
+ }
+ }
+ initializer {
+ variance_scaling_initializer {
+ factor: 1.0
+ uniform: true
+ mode: FAN_AVG
+ }
+ }
+ }
+ use_dropout: false
+ dropout_keep_probability: 1.0
+ share_box_across_classes: true
+ }
+ }
+ second_stage_post_processing {
+ batch_non_max_suppression {
+ score_threshold: 0.0
+ iou_threshold: 0.600000023842
+ max_detections_per_class: 100
+ max_total_detections: 300
+ }
+ score_converter: SOFTMAX
+ }
+ second_stage_localization_loss_weight: 2.0
+ second_stage_classification_loss_weight: 1.0
+ use_matmul_crop_and_resize: true
+ clip_anchors_to_image: true
+ use_matmul_gather_in_matcher: true
+ use_static_balanced_label_sampler: true
+ use_static_shapes: true
+ context_config {
+ max_num_context_features: 2000
+ context_feature_length: 2057
+ }
+ }
+}
+train_config {
+ batch_size: 64
+ data_augmentation_options {
+ random_horizontal_flip {
+ }
+ }
+ sync_replicas: true
+ optimizer {
+ momentum_optimizer {
+ learning_rate {
+ manual_step_learning_rate {
+ initial_learning_rate: 0.0
+ schedule {
+ step: 2000
+ learning_rate: 0.00200000009499
+ }
+ schedule {
+ step: 200000
+ learning_rate: 0.000199999994948
+ }
+ schedule {
+ step: 300000
+ learning_rate: 1.99999994948e-05
+ }
+ warmup: true
+ }
+ }
+ momentum_optimizer_value: 0.899999976158
+ }
+ use_moving_average: false
+ }
+ gradient_clipping_by_norm: 10.0
+ fine_tune_checkpoint: "PATH_TO_BE_CONFIGURED/faster_rcnn_resnet101_coco_2018_08_14/model.ckpt"
+ from_detection_checkpoint: true
+ num_steps: 500000
+ replicas_to_aggregate: 8
+ max_number_of_boxes: 100
+ unpad_groundtruth_tensors: false
+ use_bfloat16: true
+}
+train_input_reader {
+ label_map_path: "PATH_TO_BE_CONFIGURED/ss_label_map.pbtxt"
+ tf_record_input_reader {
+ input_path: "PATH_TO_BE_CONFIGURED/snapshot_serengeti_train-?????-of-?????"
+ }
+ load_context_features: true
+ input_type: TF_SEQUENCE_EXAMPLE
+}
+eval_config {
+ max_evals: 50
+ metrics_set: "coco_detection_metrics"
+ use_moving_averages: false
+ batch_size: 4
+}
+eval_input_reader {
+ label_map_path: "PATH_TO_BE_CONFIGURED/ss_label_map.pbtxt"
+ shuffle: false
+ num_epochs: 1
+ tf_record_input_reader {
+ input_path: "PATH_TO_BE_CONFIGURED/snapshot_serengeti_val-?????-of-?????"
+ }
+ load_context_features: true
+ input_type: TF_SEQUENCE_EXAMPLE
+}
diff --git a/research/object_detection/samples/configs/ssdlite_mobiledet_gpu_320x320_coco_sync_4x4.config b/research/object_detection/samples/configs/ssdlite_mobiledet_gpu_320x320_coco_sync_4x4.config
new file mode 100644
index 0000000000000000000000000000000000000000..634eb9e49a08d80e8735778d05da99f0c25cbdc2
--- /dev/null
+++ b/research/object_detection/samples/configs/ssdlite_mobiledet_gpu_320x320_coco_sync_4x4.config
@@ -0,0 +1,204 @@
+# SSDLite with MobileDet-GPU feature extractor.
+# Reference: Xiong & Liu et al., https://arxiv.org/abs/2004.14525
+# Trained on COCO, initialized from scratch.
+#
+# 5.07B MulAdds, 13.11M Parameters.
+# Latencies are 11.0ms (fp32), 3.2ms (fp16) and 2.3ms (int8) on Jetson Xavier,
+# optimized using TensorRT 7.1.
+# Achieves 28.7 mAP on COCO14 minival dataset.
+# Achieves 27.5 mAP on COCO17 val dataset.
+#
+# This config is TPU compatible.
+
+model {
+ ssd {
+ inplace_batchnorm_update: true
+ freeze_batchnorm: false
+ num_classes: 90
+ box_coder {
+ faster_rcnn_box_coder {
+ y_scale: 10.0
+ x_scale: 10.0
+ height_scale: 5.0
+ width_scale: 5.0
+ }
+ }
+ matcher {
+ argmax_matcher {
+ matched_threshold: 0.5
+ unmatched_threshold: 0.5
+ ignore_thresholds: false
+ negatives_lower_than_unmatched: true
+ force_match_for_each_row: true
+ use_matmul_gather: true
+ }
+ }
+ similarity_calculator {
+ iou_similarity {
+ }
+ }
+ encode_background_as_zeros: true
+ anchor_generator {
+ ssd_anchor_generator {
+ num_layers: 6
+ min_scale: 0.2
+ max_scale: 0.95
+ aspect_ratios: 1.0
+ aspect_ratios: 2.0
+ aspect_ratios: 0.5
+ aspect_ratios: 3.0
+ aspect_ratios: 0.3333
+ }
+ }
+ image_resizer {
+ fixed_shape_resizer {
+ height: 320
+ width: 320
+ }
+ }
+ box_predictor {
+ convolutional_box_predictor {
+ min_depth: 0
+ max_depth: 0
+ num_layers_before_predictor: 0
+ use_dropout: false
+ dropout_keep_probability: 0.8
+ kernel_size: 3
+ use_depthwise: true
+ box_code_size: 4
+ apply_sigmoid_to_scores: false
+ class_prediction_bias_init: -4.6
+ conv_hyperparams {
+ activation: RELU_6,
+ regularizer {
+ l2_regularizer {
+ weight: 0.00004
+ }
+ }
+ initializer {
+ random_normal_initializer {
+ stddev: 0.03
+ mean: 0.0
+ }
+ }
+ batch_norm {
+ train: true,
+ scale: true,
+ center: true,
+ decay: 0.97,
+ epsilon: 0.001,
+ }
+ }
+ }
+ }
+ feature_extractor {
+ type: 'ssd_mobiledet_gpu'
+ min_depth: 16
+ depth_multiplier: 1.0
+ use_depthwise: true
+ conv_hyperparams {
+ activation: RELU_6,
+ regularizer {
+ l2_regularizer {
+ weight: 0.00004
+ }
+ }
+ initializer {
+ truncated_normal_initializer {
+ stddev: 0.03
+ mean: 0.0
+ }
+ }
+ batch_norm {
+ train: true,
+ scale: true,
+ center: true,
+ decay: 0.97,
+ epsilon: 0.001,
+ }
+ }
+ override_base_feature_extractor_hyperparams: false
+ }
+ loss {
+ classification_loss {
+ weighted_sigmoid_focal {
+ alpha: 0.75,
+ gamma: 2.0
+ }
+ }
+ localization_loss {
+ weighted_smooth_l1 {
+ delta: 1.0
+ }
+ }
+ classification_weight: 1.0
+ localization_weight: 1.0
+ }
+ normalize_loss_by_num_matches: true
+ normalize_loc_loss_by_codesize: true
+ post_processing {
+ batch_non_max_suppression {
+ score_threshold: 1e-8
+ iou_threshold: 0.6
+ max_detections_per_class: 100
+ max_total_detections: 100
+ use_static_shapes: true
+ }
+ score_converter: SIGMOID
+ }
+ }
+}
+
+train_config: {
+ batch_size: 512
+ sync_replicas: true
+ startup_delay_steps: 0
+ replicas_to_aggregate: 32
+ num_steps: 400000
+ data_augmentation_options {
+ random_horizontal_flip {
+ }
+ }
+ data_augmentation_options {
+ ssd_random_crop {
+ }
+ }
+ optimizer {
+ momentum_optimizer: {
+ learning_rate: {
+ cosine_decay_learning_rate {
+ learning_rate_base: 0.8
+ total_steps: 400000
+ warmup_learning_rate: 0.13333
+ warmup_steps: 2000
+ }
+ }
+ momentum_optimizer_value: 0.9
+ }
+ use_moving_average: false
+ }
+ max_number_of_boxes: 100
+ unpad_groundtruth_tensors: false
+}
+
+train_input_reader: {
+ label_map_path: "PATH_TO_BE_CONFIGURED/mscoco_label_map.pbtxt"
+ tf_record_input_reader {
+ input_path: "PATH_TO_BE_CONFIGURED/mscoco_train.record-?????-of-00100"
+ }
+}
+
+eval_config: {
+ metrics_set: "coco_detection_metrics"
+ use_moving_averages: false
+ num_examples: 8000
+}
+
+eval_input_reader: {
+ label_map_path: "PATH_TO_BE_CONFIGURED/mscoco_label_map.pbtxt"
+ shuffle: false
+ num_epochs: 1
+ tf_record_input_reader {
+ input_path: "PATH_TO_BE_CONFIGURED/mscoco_val.record-?????-of-00010"
+ }
+}
diff --git a/research/object_detection/test_images/snapshot_serengeti/README.md b/research/object_detection/test_images/snapshot_serengeti/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..bec44871e23e0970eddad571af55f7f12dd8f8c9
--- /dev/null
+++ b/research/object_detection/test_images/snapshot_serengeti/README.md
@@ -0,0 +1,17 @@
+# Citation and license
+
+The images and metadata in this folder come from the Snapshot Serengeti dataset,
+and were accessed via [LILA.science](http://lila.science/datasets/snapshot-serengeti).
+The images and species-level labels are described in more detail in the
+associated manuscript:
+
+```
+Swanson AB, Kosmala M, Lintott CJ, Simpson RJ, Smith A, Packer C (2015)
+Snapshot Serengeti, high-frequency annotated camera trap images of 40 mammalian
+species in an African savanna. Scientific Data 2: 150026.
+```
+
+Please cite this manuscript if you use this dataset.
+
+This data set is released under the
+[Community Data License Agreement (permissive variant)](https://cdla.io/permissive-1-0/).
diff --git a/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0038.jpeg b/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0038.jpeg
new file mode 100644
index 0000000000000000000000000000000000000000..a843d7618c5968b367babc3f6778b2a1efbecd1c
Binary files /dev/null and b/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0038.jpeg differ
diff --git a/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0039.jpeg b/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0039.jpeg
new file mode 100644
index 0000000000000000000000000000000000000000..5bc16924b51f57832ccf3bac1b401467c24d2842
Binary files /dev/null and b/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0039.jpeg differ
diff --git a/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0040.jpeg b/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0040.jpeg
new file mode 100644
index 0000000000000000000000000000000000000000..2ab245ae8da7a700a30913e07251d32c55d6cfa8
Binary files /dev/null and b/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0040.jpeg differ
diff --git a/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0041.jpeg b/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0041.jpeg
new file mode 100644
index 0000000000000000000000000000000000000000..7159d71611ed80831cd489dbf1b17abff8db1508
Binary files /dev/null and b/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0041.jpeg differ
diff --git a/research/object_detection/test_images/snapshot_serengeti/context_rcnn_demo_metadata.json b/research/object_detection/test_images/snapshot_serengeti/context_rcnn_demo_metadata.json
new file mode 100644
index 0000000000000000000000000000000000000000..110793e2fbaa231106c527eac410618ba55a1a93
--- /dev/null
+++ b/research/object_detection/test_images/snapshot_serengeti/context_rcnn_demo_metadata.json
@@ -0,0 +1 @@
+{"images": [{"file_name": "models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0038.jpeg", "frame_num": 0, "seq_num_frames": 2, "id": "S1/E03/E03_R3/S1_E03_R3_PICT0038", "height": 1536, "season": "S1", "date_captured": "2010-08-07 01:04:14", "width": 2048, "seq_id": "ASG0003041", "location": "E03"}, {"file_name": "models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0039.jpeg", "frame_num": 1, "seq_num_frames": 2, "id": "S1/E03/E03_R3/S1_E03_R3_PICT0039", "height": 1536, "season": "S1", "date_captured": "2010-08-07 01:04:14", "width": 2048, "seq_id": "ASG0003041", "location": "E03"}, {"file_name": "models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0040.jpeg", "frame_num": 0, "seq_num_frames": 2, "id": "S1/E03/E03_R3/S1_E03_R3_PICT0040", "height": 1536, "season": "S1", "date_captured": "2010-08-07 02:53:46", "width": 2048, "seq_id": "ASG0003042", "location": "E03"}, {"file_name": "models/research/object_detection/test_images/snapshot_serengeti/S1_E03_R3_PICT0041.jpeg", "frame_num": 1, "seq_num_frames": 2, "id": "S1/E03/E03_R3/S1_E03_R3_PICT0041", "height": 1536, "season": "S1", "date_captured": "2010-08-07 02:53:46", "width": 2048, "seq_id": "ASG0003042", "location": "E03"}], "categories": [{"name": "empty", "id": 0}, {"name": "human", "id": 1}, {"name": "gazelleGrants", "id": 2}, {"name": "reedbuck", "id": 3}, {"name": "dikDik", "id": 4}, {"name": "zebra", "id": 5}, {"name": "porcupine", "id": 6}, {"name": "gazelleThomsons", "id": 7}, {"name": "hyenaSpotted", "id": 8}, {"name": "warthog", "id": 9}, {"name": "impala", "id": 10}, {"name": "elephant", "id": 11}, {"name": "giraffe", "id": 12}, {"name": "mongoose", "id": 13}, {"name": "buffalo", "id": 14}, {"name": "hartebeest", "id": 15}, {"name": "guineaFowl", "id": 16}, {"name": "wildebeest", "id": 17}, {"name": "leopard", "id": 18}, {"name": "ostrich", "id": 19}, {"name": "lionFemale", "id": 20}, {"name": "koriBustard", "id": 21}, {"name": "otherBird", "id": 22}, {"name": "batEaredFox", "id": 23}, {"name": "bushbuck", "id": 24}, {"name": "jackal", "id": 25}, {"name": "cheetah", "id": 26}, {"name": "eland", "id": 27}, {"name": "aardwolf", "id": 28}, {"name": "hippopotamus", "id": 29}, {"name": "hyenaStriped", "id": 30}, {"name": "aardvark", "id": 31}, {"name": "hare", "id": 32}, {"name": "baboon", "id": 33}, {"name": "vervetMonkey", "id": 34}, {"name": "waterbuck", "id": 35}, {"name": "secretaryBird", "id": 36}, {"name": "serval", "id": 37}, {"name": "lionMale", "id": 38}, {"name": "topi", "id": 39}, {"name": "honeyBadger", "id": 40}, {"name": "rodents", "id": 41}, {"name": "wildcat", "id": 42}, {"name": "civet", "id": 43}, {"name": "genet", "id": 44}, {"name": "caracal", "id": 45}, {"name": "rhinoceros", "id": 46}, {"name": "reptiles", "id": 47}, {"name": "zorilla", "id": 48}], "annotations": [{"category_id": 29, "image_id": "S1/E03/E03_R3/S1_E03_R3_PICT0038", "bbox": [614.9233639240294, 476.2385201454182, 685.5741333961523, 374.18740868568574], "id": "0154T1541168895361"}, {"category_id": 29, "image_id": "S1/E03/E03_R3/S1_E03_R3_PICT0039", "bbox": [382.03749418258434, 471.005129814144, 756.2249028682752, 397.73766517639683], "id": "Lxtry1541168934504"}, {"category_id": 29, "image_id": "S1/E03/E03_R3/S1_E03_R3_PICT0040", "bbox": [786.9475708007834, 461.0229187011687, 749.0524291992166, 385.0301413536], "id": "Xmyih1541168739115"}, {"category_id": 29, "image_id": "S1/E03/E03_R3/S1_E03_R3_PICT0041", "bbox": [573.8866577148518, 453.0573425292903, 845.0, 
398.9770812988263], "id": "ZllAa1541168769217"}]}
\ No newline at end of file
diff --git a/research/object_detection/tpu_exporters/export_saved_model_tpu_lib_test.py b/research/object_detection/tpu_exporters/export_saved_model_tpu_lib_tf1_test.py
similarity index 95%
rename from research/object_detection/tpu_exporters/export_saved_model_tpu_lib_test.py
rename to research/object_detection/tpu_exporters/export_saved_model_tpu_lib_tf1_test.py
index 4bbffed3655a764bedc88c2cf1f9f0b7f483b116..653535aa3b75576fa73662c3fc3ea2d257908107 100644
--- a/research/object_detection/tpu_exporters/export_saved_model_tpu_lib_test.py
+++ b/research/object_detection/tpu_exporters/export_saved_model_tpu_lib_tf1_test.py
@@ -19,12 +19,14 @@ from __future__ import division
from __future__ import print_function
import os
+import unittest
from absl.testing import parameterized
import numpy as np
import tensorflow.compat.v1 as tf
from object_detection.tpu_exporters import export_saved_model_tpu_lib
+from object_detection.utils import tf_version
flags = tf.app.flags
FLAGS = flags.FLAGS
@@ -35,6 +37,7 @@ def get_path(path_suffix):
path_suffix)
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class ExportSavedModelTPUTest(tf.test.TestCase, parameterized.TestCase):
@parameterized.named_parameters(
diff --git a/research/object_detection/utils/bifpn_utils.py b/research/object_detection/utils/bifpn_utils.py
index b4b2443553850cd5ce4ac9f921f614552cd4364b..d14cb841e3eb848bec6e4bf4257ee70e768b5108 100644
--- a/research/object_detection/utils/bifpn_utils.py
+++ b/research/object_detection/utils/bifpn_utils.py
@@ -26,7 +26,8 @@ from object_detection.utils import shape_utils
def create_conv_block(name, num_filters, kernel_size, strides, padding,
use_separable, apply_batchnorm, apply_activation,
- conv_hyperparams, is_training, freeze_batchnorm):
+ conv_hyperparams, is_training, freeze_batchnorm,
+ conv_bn_act_pattern=True):
"""Create Keras layers for regular or separable convolutions.
Args:
@@ -50,6 +51,9 @@ def create_conv_block(name, num_filters, kernel_size, strides, padding,
training or not. When training with a small batch size (e.g. 1), it is
desirable to freeze batch norm update and use pretrained batch norm
params.
+ conv_bn_act_pattern: Bool. By default, when True, the layers returned by
+ this function are in the order [conv, batchnorm, activation]. Otherwise,
+ when False, the order of the layers is [activation, conv, batchnorm].
Returns:
A list of keras layers, including (regular or seperable) convolution, and
@@ -73,7 +77,7 @@ def create_conv_block(name, num_filters, kernel_size, strides, padding,
depth_multiplier=1,
padding=padding,
strides=strides,
- name=name + '_separable_conv',
+ name=name + 'separable_conv',
**kwargs))
else:
layers.append(
@@ -82,18 +86,22 @@ def create_conv_block(name, num_filters, kernel_size, strides, padding,
kernel_size=kernel_size,
padding=padding,
strides=strides,
- name=name + '_conv',
+ name=name + 'conv',
**conv_hyperparams.params()))
if apply_batchnorm:
layers.append(
conv_hyperparams.build_batch_norm(
training=(is_training and not freeze_batchnorm),
- name=name + '_batchnorm'))
+ name=name + 'batchnorm'))
if apply_activation:
- layers.append(
- conv_hyperparams.build_activation_layer(name=name + '_activation'))
+ activation_layer = conv_hyperparams.build_activation_layer(
+ name=name + 'activation')
+ if conv_bn_act_pattern:
+ layers.append(activation_layer)
+ else:
+ layers = [activation_layer] + layers
return layers
@@ -133,28 +141,28 @@ def create_downsample_feature_map_ops(scale, downsample_method,
pool_size=kernel_size,
strides=stride,
padding=padding,
- name=name + '_downsample_max_x{}'.format(stride)))
+ name=name + 'downsample_max_x{}'.format(stride)))
elif downsample_method == 'avg_pooling':
layers.append(
tf.keras.layers.AveragePooling2D(
pool_size=kernel_size,
strides=stride,
padding=padding,
- name=name + '_downsample_avg_x{}'.format(stride)))
+ name=name + 'downsample_avg_x{}'.format(stride)))
elif downsample_method == 'depthwise_conv':
layers.append(
tf.keras.layers.DepthwiseConv2D(
kernel_size=kernel_size,
strides=stride,
padding=padding,
- name=name + '_downsample_depthwise_x{}'.format(stride)))
+ name=name + 'downsample_depthwise_x{}'.format(stride)))
layers.append(
conv_hyperparams.build_batch_norm(
training=(is_training and not freeze_batchnorm),
- name=name + '_downsample_batchnorm'))
+ name=name + 'downsample_batchnorm'))
layers.append(
conv_hyperparams.build_activation_layer(name=name +
- '_downsample_activation'))
+ 'downsample_activation'))
else:
raise ValueError('Unknown downsample method: {}'.format(downsample_method))
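
As a quick illustration of what the new `conv_bn_act_pattern` argument controls, here is a minimal sketch, assuming plain Keras stand-ins rather than the `conv_hyperparams`-built layers used by the real `create_conv_block`:

import tensorflow as tf

def ordered_conv_block(num_filters, conv_bn_act_pattern=True):
  # Stand-ins for the layers create_conv_block builds from conv_hyperparams.
  conv = tf.keras.layers.Conv2D(num_filters, kernel_size=3, padding='same')
  batchnorm = tf.keras.layers.BatchNormalization()
  activation = tf.keras.layers.ReLU()
  layers = [conv, batchnorm]
  if conv_bn_act_pattern:
    layers.append(activation)        # [conv, batchnorm, activation]
  else:
    layers = [activation] + layers   # [activation, conv, batchnorm]
  return layers
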
diff --git a/research/object_detection/utils/config_util.py b/research/object_detection/utils/config_util.py
index 71185a5a601272cb7801050c4ccca3bcb4b1be99..662d42e1305538534e1cb6671086e4faa6cdf00c 100644
--- a/research/object_detection/utils/config_util.py
+++ b/research/object_detection/utils/config_util.py
@@ -142,6 +142,35 @@ def get_configs_from_pipeline_file(pipeline_config_path, config_override=None):
return create_configs_from_pipeline_proto(pipeline_config)
+def clear_fine_tune_checkpoint(pipeline_config_path,
+ new_pipeline_config_path):
+ """Clears fine_tune_checkpoint and writes a new pipeline config file."""
+ configs = get_configs_from_pipeline_file(pipeline_config_path)
+ configs["train_config"].fine_tune_checkpoint = ""
+ configs["train_config"].load_all_detection_checkpoint_vars = False
+ pipeline_proto = create_pipeline_proto_from_configs(configs)
+ with tf.gfile.Open(new_pipeline_config_path, "wb") as f:
+ f.write(text_format.MessageToString(pipeline_proto))
+
+
+def update_fine_tune_checkpoint_type(train_config):
+ """Set `fine_tune_checkpoint_type` using `from_detection_checkpoint`.
+
+ `train_config.from_detection_checkpoint` field is deprecated. For backward
+ compatibility, this function sets `train_config.fine_tune_checkpoint_type`
+ based on `train_config.from_detection_checkpoint`.
+
+ Args:
+ train_config: train_pb2.TrainConfig proto object.
+
+ """
+ if not train_config.fine_tune_checkpoint_type:
+ if train_config.from_detection_checkpoint:
+ train_config.fine_tune_checkpoint_type = "detection"
+ else:
+ train_config.fine_tune_checkpoint_type = "classification"
+
+
def create_configs_from_pipeline_proto(pipeline_config):
"""Creates a configs dictionary from pipeline_pb2.TrainEvalPipelineConfig.
diff --git a/research/object_detection/utils/config_util_test.py b/research/object_detection/utils/config_util_test.py
index cd5f87d8d0a40e92cd20533b3eec3e5294e00640..f36970c11078b222710427b46ffd502be608c109 100644
--- a/research/object_detection/utils/config_util_test.py
+++ b/research/object_detection/utils/config_util_test.py
@@ -19,7 +19,7 @@ from __future__ import division
from __future__ import print_function
import os
-
+import unittest
from six.moves import range
import tensorflow.compat.v1 as tf
@@ -32,6 +32,7 @@ from object_detection.protos import model_pb2
from object_detection.protos import pipeline_pb2
from object_detection.protos import train_pb2
from object_detection.utils import config_util
+from object_detection.utils import tf_version
# pylint: disable=g-import-not-at-top
try:
@@ -282,18 +283,22 @@ class ConfigUtilTest(tf.test.TestCase):
self.assertAlmostEqual(hparams.learning_rate * warmup_scale_factor,
cosine_lr.warmup_learning_rate)
+ @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.")
def testRMSPropWithNewLearingRate(self):
"""Tests new learning rates for RMSProp Optimizer."""
self._assertOptimizerWithNewLearningRate("rms_prop_optimizer")
+ @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.")
def testMomentumOptimizerWithNewLearningRate(self):
"""Tests new learning rates for Momentum Optimizer."""
self._assertOptimizerWithNewLearningRate("momentum_optimizer")
+ @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.")
def testAdamOptimizerWithNewLearningRate(self):
"""Tests new learning rates for Adam Optimizer."""
self._assertOptimizerWithNewLearningRate("adam_optimizer")
+ @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.")
def testGenericConfigOverride(self):
"""Tests generic config overrides for all top-level configs."""
# Set one parameter for each of the top-level pipeline configs:
@@ -329,6 +334,7 @@ class ConfigUtilTest(tf.test.TestCase):
self.assertEqual(2,
configs["graph_rewriter_config"].quantization.weight_bits)
+ @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.")
def testNewBatchSize(self):
"""Tests that batch size is updated appropriately."""
original_batch_size = 2
@@ -344,6 +350,7 @@ class ConfigUtilTest(tf.test.TestCase):
new_batch_size = configs["train_config"].batch_size
self.assertEqual(16, new_batch_size)
+ @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.")
def testNewBatchSizeWithClipping(self):
"""Tests that batch size is clipped to 1 from below."""
original_batch_size = 2
@@ -359,6 +366,7 @@ class ConfigUtilTest(tf.test.TestCase):
new_batch_size = configs["train_config"].batch_size
self.assertEqual(1, new_batch_size) # Clipped to 1.0.
+ @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.")
def testOverwriteBatchSizeWithKeyValue(self):
"""Tests that batch size is overwritten based on key/value."""
pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
@@ -369,6 +377,7 @@ class ConfigUtilTest(tf.test.TestCase):
new_batch_size = configs["train_config"].batch_size
self.assertEqual(10, new_batch_size)
+ @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.")
def testKeyValueOverrideBadKey(self):
"""Tests that overwriting with a bad key causes an exception."""
pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
@@ -377,6 +386,7 @@ class ConfigUtilTest(tf.test.TestCase):
with self.assertRaises(ValueError):
config_util.merge_external_params_with_configs(configs, hparams)
+ @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.")
def testOverwriteBatchSizeWithBadValueType(self):
"""Tests that overwriting with a bad valuye type causes an exception."""
pipeline_config = pipeline_pb2.TrainEvalPipelineConfig()
@@ -387,6 +397,7 @@ class ConfigUtilTest(tf.test.TestCase):
with self.assertRaises(TypeError):
config_util.merge_external_params_with_configs(configs, hparams)
+ @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.")
def testNewMomentumOptimizerValue(self):
"""Tests that new momentum value is updated appropriately."""
original_momentum_value = 0.4
@@ -404,6 +415,7 @@ class ConfigUtilTest(tf.test.TestCase):
new_momentum_value = optimizer_config.momentum_optimizer_value
self.assertAlmostEqual(1.0, new_momentum_value) # Clipped to 1.0.
+ @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.")
def testNewClassificationLocalizationWeightRatio(self):
"""Tests that the loss weight ratio is updated appropriately."""
original_localization_weight = 0.1
@@ -426,6 +438,7 @@ class ConfigUtilTest(tf.test.TestCase):
self.assertAlmostEqual(1.0, loss.localization_weight)
self.assertAlmostEqual(new_weight_ratio, loss.classification_weight)
+ @unittest.skipIf(tf_version.is_tf2(), "Skipping TF1.X only test.")
def testNewFocalLossParameters(self):
"""Tests that the loss weight ratio is updated appropriately."""
original_alpha = 1.0
diff --git a/research/object_detection/utils/model_util.py b/research/object_detection/utils/model_util.py
index 6a46265c3d596230a9073c5b5b8f4bb622f346bf..bc5cfe48231bccc034147f6a06a4d5f09cca0159 100644
--- a/research/object_detection/utils/model_util.py
+++ b/research/object_detection/utils/model_util.py
@@ -54,8 +54,8 @@ def extract_submodel(model, inputs, outputs, name=None):
for layer in model.layers:
layer_output = layer.output
layer_inputs = layer.input
- output_to_layer[layer_output.ref()] = layer
- output_to_layer_input[layer_output.ref()] = layer_inputs
+ output_to_layer[layer_output.experimental_ref()] = layer
+ output_to_layer_input[layer_output.experimental_ref()] = layer_inputs
model_inputs_dict = {}
memoized_results = {}
@@ -63,21 +63,22 @@ def extract_submodel(model, inputs, outputs, name=None):
# Relies on recursion, very low limit in python
def _recurse_in_model(tensor):
"""Walk the existing model recursively to copy a submodel."""
- if tensor.ref() in memoized_results:
- return memoized_results[tensor.ref()]
- if (tensor.ref() == inputs.ref()) or (
+ if tensor.experimental_ref() in memoized_results:
+ return memoized_results[tensor.experimental_ref()]
+ if (tensor.experimental_ref() == inputs.experimental_ref()) or (
isinstance(inputs, list) and tensor in inputs):
- if tensor.ref() not in model_inputs_dict:
- model_inputs_dict[tensor.ref()] = tf.keras.layers.Input(tensor=tensor)
- out = model_inputs_dict[tensor.ref()]
+ if tensor.experimental_ref() not in model_inputs_dict:
+ model_inputs_dict[tensor.experimental_ref()] = tf.keras.layers.Input(
+ tensor=tensor)
+ out = model_inputs_dict[tensor.experimental_ref()]
else:
- cur_inputs = output_to_layer_input[tensor.ref()]
- cur_layer = output_to_layer[tensor.ref()]
+ cur_inputs = output_to_layer_input[tensor.experimental_ref()]
+ cur_layer = output_to_layer[tensor.experimental_ref()]
if isinstance(cur_inputs, list):
out = cur_layer([_recurse_in_model(inp) for inp in cur_inputs])
else:
out = cur_layer(_recurse_in_model(cur_inputs))
- memoized_results[tensor.ref()] = out
+ memoized_results[tensor.experimental_ref()] = out
return out
if isinstance(outputs, list):
@@ -86,8 +87,10 @@ def extract_submodel(model, inputs, outputs, name=None):
model_outputs = _recurse_in_model(outputs)
if isinstance(inputs, list):
- model_inputs = [model_inputs_dict[tensor.ref()] for tensor in inputs]
+ model_inputs = [
+ model_inputs_dict[tensor.experimental_ref()] for tensor in inputs
+ ]
else:
- model_inputs = model_inputs_dict[inputs.ref()]
+ model_inputs = model_inputs_dict[inputs.experimental_ref()]
return tf.keras.Model(inputs=model_inputs, outputs=model_outputs, name=name)
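
The `experimental_ref()` change above only affects how tensors are keyed in dictionaries; a hedged sketch of `extract_submodel` usage, mirroring the renamed TF2-only test with hypothetical layer names and sizes:

import tensorflow as tf
from object_detection.utils import model_util

# Hypothetical toy network; layer names are illustrative only.
inputs = tf.keras.Input(shape=(16,))
x = tf.keras.layers.Dense(8, name='hidden')(inputs)
outputs = tf.keras.layers.Dense(2, name='logits')(x)
model = tf.keras.Model(inputs=inputs, outputs=outputs)

# Extract the tail of the network; tensors are used as dict keys internally
# via Tensor.experimental_ref().
submodel = model_util.extract_submodel(
    model=model,
    inputs=model.get_layer('hidden').output,
    outputs=model.get_layer('logits').output)
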
diff --git a/research/object_detection/utils/model_util_test.py b/research/object_detection/utils/model_util_tf2_test.py
similarity index 94%
rename from research/object_detection/utils/model_util_test.py
rename to research/object_detection/utils/model_util_tf2_test.py
index c505464c7831e812e813e085b3dc59231f66802c..77b1d01725e4dfd51176d676168657612d5826bc 100644
--- a/research/object_detection/utils/model_util_test.py
+++ b/research/object_detection/utils/model_util_tf2_test.py
@@ -19,11 +19,14 @@ from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+import unittest
import tensorflow.compat.v1 as tf
from object_detection.utils import model_util
+from object_detection.utils import tf_version
+@unittest.skipIf(tf_version.is_tf1(), 'Skipping TF2.X only test.')
class ExtractSubmodelUtilTest(tf.test.TestCase):
def test_simple_model(self):
diff --git a/research/object_detection/utils/object_detection_evaluation_test.py b/research/object_detection/utils/object_detection_evaluation_test.py
index 5b2b5c8011fa57a18f6262b1ad895df1e9fb16a3..ff399ed4bad4d4872eb135685789678237f0e0b1 100644
--- a/research/object_detection/utils/object_detection_evaluation_test.py
+++ b/research/object_detection/utils/object_detection_evaluation_test.py
@@ -18,6 +18,8 @@
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
+
+import unittest
from absl.testing import parameterized
import numpy as np
import six
@@ -26,6 +28,7 @@ import tensorflow.compat.v1 as tf
from object_detection import eval_util
from object_detection.core import standard_fields
from object_detection.utils import object_detection_evaluation
+from object_detection.utils import tf_version
class OpenImagesV2EvaluationTest(tf.test.TestCase):
@@ -970,6 +973,8 @@ class ObjectDetectionEvaluationTest(tf.test.TestCase):
self.assertAlmostEqual(copy_mean_corloc, mean_corloc)
+@unittest.skipIf(tf_version.is_tf2(), 'Eval Metrics ops are supported in TF1.X '
+ 'only.')
class ObjectDetectionEvaluatorTest(tf.test.TestCase, parameterized.TestCase):
def setUp(self):
diff --git a/research/object_detection/utils/ops.py b/research/object_detection/utils/ops.py
index f59881580947acb010c3bc1fd308d7ea2965eeeb..0cd83d38d5a679d420bfbb3c81f0964c662f98af 100644
--- a/research/object_detection/utils/ops.py
+++ b/research/object_detection/utils/ops.py
@@ -268,7 +268,7 @@ def padded_one_hot_encoding(indices, depth, left_pad):
on_value=1, off_value=0), tf.float32)
return tf.pad(one_hot, [[0, 0], [left_pad, 0]], mode='CONSTANT')
result = tf.cond(tf.greater(tf.size(indices), 0), one_hot_and_pad,
- lambda: tf.zeros((depth + left_pad, 0)))
+ lambda: tf.zeros((tf.size(indices), depth + left_pad)))
return tf.reshape(result, [-1, depth + left_pad])
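
The `padded_one_hot_encoding` change replaces the mis-shaped constant returned for empty input; a minimal sketch of the intended behavior, assuming eager execution:

import tensorflow.compat.v1 as tf
from object_detection.utils import ops

indices = tf.constant([], dtype=tf.int32)
one_hot = ops.padded_one_hot_encoding(indices, depth=6, left_pad=3)
# With zero indices, the padded encoding is empty along the first dimension,
# i.e. shape (0, 9), rather than a constant of the wrong shape.
print(one_hot.shape)
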
diff --git a/research/object_detection/utils/ops_test.py b/research/object_detection/utils/ops_test.py
index a7a6f8df32a4f9f881b0002d4e3d00d4830913fc..d4da7b1071b899002faa830a07686541c69d99bd 100644
--- a/research/object_detection/utils/ops_test.py
+++ b/research/object_detection/utils/ops_test.py
@@ -196,8 +196,7 @@ class OpsTestPaddedOneHotEncoding(test_case.TestCase):
[0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 1]], np.float32)
- # Executing on CPU only because output shape is not constant.
- out_one_hot_tensor = self.execute_cpu(graph_fn, [])
+ out_one_hot_tensor = self.execute(graph_fn, [])
self.assertAllClose(out_one_hot_tensor, expected_tensor, rtol=1e-10,
atol=1e-10)
@@ -212,8 +211,7 @@ class OpsTestPaddedOneHotEncoding(test_case.TestCase):
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 1]], np.float32)
- # Executing on CPU only because output shape is not constant.
- out_one_hot_tensor = self.execute_cpu(graph_fn, [])
+ out_one_hot_tensor = self.execute(graph_fn, [])
self.assertAllClose(out_one_hot_tensor, expected_tensor, rtol=1e-10,
atol=1e-10)
@@ -229,8 +227,7 @@ class OpsTestPaddedOneHotEncoding(test_case.TestCase):
[0, 0, 0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1]], np.float32)
- # executing on CPU only because output shape is not constant.
- out_one_hot_tensor = self.execute_cpu(graph_fn, [])
+ out_one_hot_tensor = self.execute(graph_fn, [])
self.assertAllClose(out_one_hot_tensor, expected_tensor, rtol=1e-10,
atol=1e-10)
@@ -246,8 +243,7 @@ class OpsTestPaddedOneHotEncoding(test_case.TestCase):
return one_hot_tensor
expected_tensor = np.zeros((0, depth + pad))
- # executing on CPU only because output shape is not constant.
- out_one_hot_tensor = self.execute_cpu(graph_fn, [])
+ out_one_hot_tensor = self.execute(graph_fn, [])
self.assertAllClose(out_one_hot_tensor, expected_tensor, rtol=1e-10,
atol=1e-10)
diff --git a/research/object_detection/utils/target_assigner_utils.py b/research/object_detection/utils/target_assigner_utils.py
index ca7918f3ddc6a6046771bda1f8039a3035b57725..0aa26a47ed75ac918a82aaee184fa2bb0dfa7127 100644
--- a/research/object_detection/utils/target_assigner_utils.py
+++ b/research/object_detection/utils/target_assigner_utils.py
@@ -118,12 +118,17 @@ def compute_floor_offsets_with_indices(y_source,
they were put on the grids) to target coordinates. Note that the input
coordinates should be the "absolute" coordinates in terms of the output image
dimensions as opposed to the normalized coordinates (i.e. values in [0, 1]).
+  If the input y and x sources have a second dimension (representing the
+  neighboring pixels), then the offsets are computed from each of the
+  neighboring pixels to their corresponding target (along the first dimension).
Args:
- y_source: A tensor with shape [num_points] representing the absolute
- y-coordinates (in the output image space) of the source points.
- x_source: A tensor with shape [num_points] representing the absolute
- x-coordinates (in the output image space) of the source points.
+ y_source: A tensor with shape [num_points] (or [num_points, num_neighbors])
+ representing the absolute y-coordinates (in the output image space) of the
+ source points.
+ x_source: A tensor with shape [num_points] (or [num_points, num_neighbors])
+ representing the absolute x-coordinates (in the output image space) of the
+ source points.
y_target: A tensor with shape [num_points] representing the absolute
y-coordinates (in the output image space) of the target points. If not
provided, then y_source is used as the targets.
@@ -133,18 +138,33 @@ def compute_floor_offsets_with_indices(y_source,
Returns:
A tuple of two tensors:
- offsets: A tensor with shape [num_points, 2] representing the offsets of
- each input point.
- indices: A tensor with shape [num_points, 2] representing the indices of
- where the offsets should be retrieved in the output image dimension
- space.
+ offsets: A tensor with shape [num_points, 2] (or
+ [num_points, num_neighbors, 2]) representing the offsets of each input
+ point.
+ indices: A tensor with shape [num_points, 2] (or
+ [num_points, num_neighbors, 2]) representing the indices of where the
+ offsets should be retrieved in the output image dimension space.
+
+  Raises:
+    ValueError: If the source and target shapes are inconsistent.
"""
y_source_floored = tf.floor(y_source)
x_source_floored = tf.floor(x_source)
- if y_target is None:
+
+ source_shape = shape_utils.combined_static_and_dynamic_shape(y_source)
+ if y_target is None and x_target is None:
y_target = y_source
- if x_target is None:
x_target = x_source
+ else:
+ target_shape = shape_utils.combined_static_and_dynamic_shape(y_target)
+ if len(source_shape) == 2 and len(target_shape) == 1:
+ _, num_neighbors = source_shape
+ y_target = tf.tile(
+ tf.expand_dims(y_target, -1), multiples=[1, num_neighbors])
+ x_target = tf.tile(
+ tf.expand_dims(x_target, -1), multiples=[1, num_neighbors])
+ elif source_shape != target_shape:
+ raise ValueError('Inconsistent source and target shape.')
y_offset = y_target - y_source_floored
x_offset = x_target - x_source_floored
@@ -152,9 +172,8 @@ def compute_floor_offsets_with_indices(y_source,
y_source_indices = tf.cast(y_source_floored, tf.int32)
x_source_indices = tf.cast(x_source_floored, tf.int32)
- indices = tf.stack([y_source_indices, x_source_indices], axis=1)
- offsets = tf.stack([y_offset, x_offset], axis=1)
-
+ indices = tf.stack([y_source_indices, x_source_indices], axis=-1)
+ offsets = tf.stack([y_offset, x_offset], axis=-1)
return offsets, indices
@@ -231,6 +250,12 @@ def blackout_pixel_weights_by_box_regions(height, width, boxes, blackout):
A float tensor with shape [height, width] where all values within the
regions of the blackout boxes are 0.0 and 1.0 else where.
"""
+ num_instances, _ = shape_utils.combined_static_and_dynamic_shape(boxes)
+ # If no annotation instance is provided, return all ones (instead of
+ # unexpected values) to avoid NaN loss value.
+ if num_instances == 0:
+ return tf.ones([height, width], dtype=tf.float32)
+
(y_grid, x_grid) = image_shape_to_grids(height, width)
y_grid = tf.expand_dims(y_grid, axis=0)
x_grid = tf.expand_dims(x_grid, axis=0)
@@ -257,3 +282,72 @@ def blackout_pixel_weights_by_box_regions(height, width, boxes, blackout):
out_boxes = tf.reduce_max(selected_in_boxes, axis=0)
out_boxes = tf.ones_like(out_boxes) - out_boxes
return out_boxes
+
+
+def _get_yx_indices_offset_by_radius(radius):
+ """Gets the y and x index offsets that are within the radius."""
+ y_offsets = []
+ x_offsets = []
+ for y_offset in range(-radius, radius + 1, 1):
+ for x_offset in range(-radius, radius + 1, 1):
+ if x_offset ** 2 + y_offset ** 2 <= radius ** 2:
+ y_offsets.append(y_offset)
+ x_offsets.append(x_offset)
+ return (tf.constant(y_offsets, dtype=tf.float32),
+ tf.constant(x_offsets, dtype=tf.float32))
+
+
+def get_surrounding_grids(height, width, y_coordinates, x_coordinates, radius):
+ """Gets the indices of the surrounding pixels of the input y, x coordinates.
+
+ This function returns the pixel indices corresponding to the (floor of the)
+ input coordinates and their surrounding pixels within the radius. If the
+ radius is set to 0, then only the pixels that correspond to the floor of the
+ coordinates will be returned. If the radius is larger than 0, then all of the
+  pixels within the radius of the "floor pixels" will also be returned. For
+  example, if the input coordinate is [2.1, 3.5] and the radius is 1, then five
+  pixel indices will be returned: [2, 3], [1, 3], [2, 2], [2, 4], [3, 3]. Also,
+  if a surrounding pixel falls outside the valid image region, the returned
+  pixel index will be [0, 0] and its corresponding "valid" value will be
+  False.
+
+ Args:
+ height: int, the height of the output image.
+ width: int, the width of the output image.
+ y_coordinates: A tensor with shape [num_points] representing the absolute
+ y-coordinates (in the output image space) of the points.
+ x_coordinates: A tensor with shape [num_points] representing the absolute
+ x-coordinates (in the output image space) of the points.
+ radius: int, the radius of the neighboring pixels to be considered and
+ returned. If set to 0, then only the pixel indices corresponding to the
+ floor of the input coordinates will be returned.
+
+ Returns:
+ A tuple of three tensors:
+    y_indices: A [num_points, num_neighbors] float tensor representing the
+      pixel y indices corresponding to the input points within the radius.
+      "num_neighbors" is determined by the size of the radius.
+    x_indices: A [num_points, num_neighbors] float tensor representing the
+      pixel x indices corresponding to the input points within the radius.
+      "num_neighbors" is determined by the size of the radius.
+    valid: A [num_points, num_neighbors] boolean tensor representing whether
+      each returned index is in the valid image region.
+ """
+ # Floored y, x: [num_points, 1].
+ y_center = tf.expand_dims(tf.math.floor(y_coordinates), axis=-1)
+ x_center = tf.expand_dims(tf.math.floor(x_coordinates), axis=-1)
+ y_offsets, x_offsets = _get_yx_indices_offset_by_radius(radius)
+ # Indices offsets: [1, num_neighbors].
+ y_offsets = tf.expand_dims(y_offsets, axis=0)
+ x_offsets = tf.expand_dims(x_offsets, axis=0)
+
+ # Floor + offsets: [num_points, num_neighbors].
+ y_output = y_center + y_offsets
+ x_output = x_center + x_offsets
+ default_output = tf.zeros_like(y_output)
+ valid = tf.logical_and(
+ tf.logical_and(x_output >= 0, x_output < width),
+ tf.logical_and(y_output >= 0, y_output < height))
+ y_output = tf.where(valid, y_output, default_output)
+ x_output = tf.where(valid, x_output, default_output)
+ return (y_output, x_output, valid)
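
A small sketch reproducing the docstring example of the new `get_surrounding_grids` helper, assuming eager execution:

import tensorflow as tf
from object_detection.utils import target_assigner_utils as ta_utils

y_idx, x_idx, valid = ta_utils.get_surrounding_grids(
    height=10,
    width=10,
    y_coordinates=tf.constant([2.1], dtype=tf.float32),
    x_coordinates=tf.constant([3.5], dtype=tf.float32),
    radius=1)
# The floor pixel [2, 3] plus its four in-radius neighbours:
# [1, 3], [2, 2], [2, 4], [3, 3]. All fall inside the 10x10 grid, so
# `valid` is all True here.
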
diff --git a/research/object_detection/utils/target_assigner_utils_test.py b/research/object_detection/utils/target_assigner_utils_test.py
index b895cca012128078641cf8e70d20c201f3a51a4d..f663445324d7ee648130018b522fdcbaaeb74d54 100644
--- a/research/object_detection/utils/target_assigner_utils_test.py
+++ b/research/object_detection/utils/target_assigner_utils_test.py
@@ -87,8 +87,32 @@ class TargetUtilTest(test_case.TestCase):
np.testing.assert_array_almost_equal(offsets,
np.array([[1.1, -0.8], [0.1, 0.5]]))
- np.testing.assert_array_almost_equal(indices,
- np.array([[1, 2], [0, 4]]))
+ np.testing.assert_array_almost_equal(indices, np.array([[1, 2], [0, 4]]))
+
+ def test_compute_floor_offsets_with_indices_multisources(self):
+
+ def graph_fn():
+ y_source = tf.constant([[1.0, 0.0], [2.0, 3.0]], dtype=tf.float32)
+ x_source = tf.constant([[2.0, 4.0], [3.0, 3.0]], dtype=tf.float32)
+ y_target = tf.constant([2.1, 0.1], dtype=tf.float32)
+ x_target = tf.constant([1.2, 4.5], dtype=tf.float32)
+ (offsets, indices) = ta_utils.compute_floor_offsets_with_indices(
+ y_source, x_source, y_target, x_target)
+ return offsets, indices
+
+ offsets, indices = self.execute(graph_fn, [])
+ # Offset from the first source to target.
+ np.testing.assert_array_almost_equal(offsets[:, 0, :],
+ np.array([[1.1, -0.8], [-1.9, 1.5]]))
+ # Offset from the second source to target.
+ np.testing.assert_array_almost_equal(offsets[:, 1, :],
+ np.array([[2.1, -2.8], [-2.9, 1.5]]))
+ # Indices from the first source to target.
+ np.testing.assert_array_almost_equal(indices[:, 0, :],
+ np.array([[1, 2], [2, 3]]))
+ # Indices from the second source to target.
+ np.testing.assert_array_almost_equal(indices[:, 1, :],
+ np.array([[0, 4], [3, 3]]))
def test_get_valid_keypoints_mask(self):
@@ -174,6 +198,44 @@ class TargetUtilTest(test_case.TestCase):
# 20 * 10 - 6 * 6 - 3 * 7 = 143.0
self.assertAlmostEqual(np.sum(output), 143.0)
+ def test_blackout_pixel_weights_by_box_regions_zero_instance(self):
+ def graph_fn():
+ boxes = tf.zeros([0, 4], dtype=tf.float32)
+ blackout = tf.zeros([0], dtype=tf.bool)
+ blackout_pixel_weights_by_box_regions = tf.function(
+ ta_utils.blackout_pixel_weights_by_box_regions)
+ output = blackout_pixel_weights_by_box_regions(10, 20, boxes, blackout)
+ return output
+
+ output = self.execute(graph_fn, [])
+ # The output should be all 1s since there's no annotation provided.
+ np.testing.assert_array_equal(output, np.ones([10, 20], dtype=np.float32))
+
+ def test_get_surrounding_grids(self):
+
+ def graph_fn():
+ y_coordinates = tf.constant([0.5], dtype=tf.float32)
+ x_coordinates = tf.constant([4.5], dtype=tf.float32)
+ output = ta_utils.get_surrounding_grids(
+ height=3,
+ width=5,
+ y_coordinates=y_coordinates,
+ x_coordinates=x_coordinates,
+ radius=1)
+ return output
+
+ y_indices, x_indices, valid = self.execute(graph_fn, [])
+
+    # Five neighboring indices: [-1, 4] (out of bounds), [0, 3], [0, 4],
+    # [0, 5] (out of bounds), [1, 4].
+ np.testing.assert_array_almost_equal(
+ y_indices,
+ np.array([[0.0, 0.0, 0.0, 0.0, 1.0]]))
+ np.testing.assert_array_almost_equal(
+ x_indices,
+ np.array([[0.0, 3.0, 4.0, 0.0, 4.0]]))
+ self.assertAllEqual(valid, [[False, True, True, False, True]])
+
if __name__ == '__main__':
tf.test.main()
diff --git a/research/object_detection/utils/test_utils.py b/research/object_detection/utils/test_utils.py
index f7e92c0bfda9099580a5761d6dcc526608b2ffd0..666a29adbad262054e039c30fb9deb52e66ac665 100644
--- a/research/object_detection/utils/test_utils.py
+++ b/research/object_detection/utils/test_utils.py
@@ -271,3 +271,19 @@ class GraphContextOrNone(object):
return False
else:
return self.graph.__exit__(ttype, value, traceback)
+
+
+def image_with_dynamic_shape(height, width, channels):
+ """Returns a single image with dynamic shape."""
+ h = tf.random.uniform([], minval=height, maxval=height+1, dtype=tf.int32)
+ w = tf.random.uniform([], minval=width, maxval=width+1, dtype=tf.int32)
+ image = tf.random.uniform([h, w, channels])
+ return image
+
+
+def keypoints_with_dynamic_shape(num_instances, num_keypoints, num_coordinates):
+ """Returns keypoints with dynamic shape."""
+ n = tf.random.uniform([], minval=num_instances, maxval=num_instances+1,
+ dtype=tf.int32)
+ keypoints = tf.random.uniform([n, num_keypoints, num_coordinates])
+ return keypoints
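
A hedged usage sketch for the new dynamic-shape test helpers; the sizes below are arbitrary:

from object_detection.utils import test_utils

image = test_utils.image_with_dynamic_shape(height=64, width=48, channels=3)
keypoints = test_utils.keypoints_with_dynamic_shape(
    num_instances=2, num_keypoints=17, num_coordinates=2)
# When traced inside a tf.function or graph, the randomly drawn dimensions are
# statically unknown, which is useful for exercising shape-agnostic ops.
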
diff --git a/research/object_detection/utils/variables_helper.py b/research/object_detection/utils/variables_helper.py
index 327f3b679a2fb6abe48da4894ae7ea9ab4f70afa..17b63eb70bfdda9156d51bbfb15281c206ab9ed4 100644
--- a/research/object_detection/utils/variables_helper.py
+++ b/research/object_detection/utils/variables_helper.py
@@ -47,8 +47,6 @@ def filter_variables(variables, filter_regex_list, invert=False):
Returns:
a list of filtered variables.
"""
- if tf.executing_eagerly():
- raise ValueError('Accessing variables is not supported in eager mode.')
kept_vars = []
variables_to_ignore_patterns = list([fre for fre in filter_regex_list if fre])
for var in variables:
@@ -74,8 +72,6 @@ def multiply_gradients_matching_regex(grads_and_vars, regex_list, multiplier):
Returns:
grads_and_vars: A list of gradient to variable pairs (tuples).
"""
- if tf.executing_eagerly():
- raise ValueError('Accessing variables is not supported in eager mode.')
variables = [pair[1] for pair in grads_and_vars]
matching_vars = filter_variables(variables, regex_list, invert=True)
for var in matching_vars:
@@ -97,8 +93,6 @@ def freeze_gradients_matching_regex(grads_and_vars, regex_list):
grads_and_vars: A list of gradient to variable pairs (tuples) that do not
contain the variables and gradients matching the regex.
"""
- if tf.executing_eagerly():
- raise ValueError('Accessing variables is not supported in eager mode.')
variables = [pair[1] for pair in grads_and_vars]
matching_vars = filter_variables(variables, regex_list, invert=True)
kept_grads_and_vars = [pair for pair in grads_and_vars
@@ -129,8 +123,6 @@ def get_variables_available_in_checkpoint(variables,
Raises:
ValueError: if `variables` is not a list or dict.
"""
- if tf.executing_eagerly():
- raise ValueError('Accessing variables is not supported in eager mode.')
if isinstance(variables, list):
variable_names_map = {}
for variable in variables:
@@ -178,8 +170,6 @@ def get_global_variables_safely():
Returns:
The result of tf.global_variables()
"""
- if tf.executing_eagerly():
- raise ValueError('Accessing variables is not supported in eager mode.')
with tf.init_scope():
if tf.executing_eagerly():
raise ValueError("Global variables collection is not tracked when "
diff --git a/research/object_detection/utils/variables_helper_test.py b/research/object_detection/utils/variables_helper_tf1_test.py
similarity index 96%
rename from research/object_detection/utils/variables_helper_test.py
rename to research/object_detection/utils/variables_helper_tf1_test.py
index 44e72d0d1a588507378166083de4ab4c61e83304..a8bd43ed9ab5a5b57b72733877b93fd39662a885 100644
--- a/research/object_detection/utils/variables_helper_test.py
+++ b/research/object_detection/utils/variables_helper_tf1_test.py
@@ -20,13 +20,15 @@ from __future__ import division
from __future__ import print_function
import os
-
+import unittest
import tensorflow.compat.v1 as tf
from object_detection.utils import test_case
+from object_detection.utils import tf_version
from object_detection.utils import variables_helper
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class FilterVariablesTest(test_case.TestCase):
def _create_variables(self):
@@ -68,6 +70,7 @@ class FilterVariablesTest(test_case.TestCase):
self.assertCountEqual(out_variables, [variables[1], variables[3]])
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class MultiplyGradientsMatchingRegexTest(tf.test.TestCase):
def _create_grads_and_vars(self):
@@ -107,6 +110,7 @@ class MultiplyGradientsMatchingRegexTest(tf.test.TestCase):
self.assertCountEqual(output, exp_output)
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class FreezeGradientsMatchingRegexTest(test_case.TestCase):
def _create_grads_and_vars(self):
@@ -132,6 +136,7 @@ class FreezeGradientsMatchingRegexTest(test_case.TestCase):
self.assertCountEqual(output, exp_output)
+@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.')
class GetVariablesAvailableInCheckpointTest(test_case.TestCase):
def test_return_all_variables_from_checkpoint(self):
diff --git a/research/sentiment_analysis/sentiment_main.py b/research/sentiment_analysis/sentiment_main.py
index a749d25f3098e071b630f07bac81450b40ade9ba..8b9ba5f921eef72377b480669bd81087fd1b160a 100644
--- a/research/sentiment_analysis/sentiment_main.py
+++ b/research/sentiment_analysis/sentiment_main.py
@@ -10,17 +10,20 @@ from __future__ import division
from __future__ import print_function
import argparse
+import os
import tensorflow as tf
from data import dataset
import sentiment_model
+
+
_DROPOUT_RATE = 0.95
def run_model(dataset_name, emb_dim, voc_size, sen_len,
- hid_dim, batch_size, epochs):
+ hid_dim, batch_size, epochs, model_save_dir):
"""Run training loop and an evaluation at the end.
Args:
@@ -48,9 +51,23 @@ def run_model(dataset_name, emb_dim, voc_size, sen_len,
x_train, y_train, x_test, y_test = dataset.load(
dataset_name, voc_size, sen_len)
+ if not os.path.exists(model_save_dir):
+ os.makedirs(model_save_dir)
+
+  filepath = os.path.join(model_save_dir, "model-{epoch:02d}.hdf5")
+
+  checkpoint_callback = tf.keras.callbacks.ModelCheckpoint(
+      filepath, monitor='val_accuracy', verbose=1, save_best_only=True,
+      save_weights_only=True, mode='auto')
+
model.fit(x_train, y_train, batch_size=batch_size,
- validation_split=0.4, epochs=epochs)
+ validation_split=0.4, epochs=epochs, callbacks=[checkpoint_callback])
+
score = model.evaluate(x_test, y_test, batch_size=batch_size)
+
+ model.save(os.path.join(model_save_dir, "full-model.h5"))
+
tf.logging.info("Score: {}".format(score))
if __name__ == "__main__":
@@ -85,8 +102,14 @@ if __name__ == "__main__":
help="The number of epochs for training.",
type=int, default=55)
+ parser.add_argument("-f", "--folder",
+                      help="Directory in which to save the trained model.",
+ type=str, default=None)
args = parser.parse_args()
+ if args.folder is None:
+    parser.error("--folder is required; pass a directory to save the model.")
+
run_model(args.dataset, args.embedding_dim, args.vocabulary_size,
args.sentence_length, args.hidden_dim,
- args.batch_size, args.epochs)
+ args.batch_size, args.epochs, args.folder)
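
A hedged sketch, not part of the patch, showing how the artifacts written by the updated script could be restored; the directory path is hypothetical, and `load_model` may need `custom_objects` if the sentiment model uses custom layers:

import tensorflow as tf

# Full model saved by model.save(...) at the end of training.
model = tf.keras.models.load_model("/tmp/sentiment_model/full-model.h5")

# Per-epoch weight checkpoints written by the ModelCheckpoint callback can be
# restored into a freshly built model of the same architecture, e.g.:
# model.load_weights("/tmp/sentiment_model/model-07.hdf5")
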